Lines matching "cma" in mm/cma.c, the Linux kernel's Contiguous Memory Allocator. Source lines that did not match are elided and marked with /* ... */ comments.

#define pr_fmt(fmt) "cma: " fmt

#include <linux/cma.h>
#include <trace/events/cma.h>

#include "cma.h"

struct cma cma_areas[MAX_CMA_AREAS];
phys_addr_t cma_get_base(const struct cma *cma)
{
	return PFN_PHYS(cma->base_pfn);
}

unsigned long cma_get_size(const struct cma *cma)
{
	return cma->count << PAGE_SHIFT;
}

const char *cma_get_name(const struct cma *cma)
{
	return cma->name;
}
static unsigned long cma_bitmap_aligned_mask(const struct cma *cma,
					     unsigned int align_order)
{
	if (align_order <= cma->order_per_bit)
		return 0;
	return (1UL << (align_order - cma->order_per_bit)) - 1;
}
static unsigned long cma_bitmap_aligned_offset(const struct cma *cma,
					       unsigned int align_order)
{
	return (cma->base_pfn & ((1UL << align_order) - 1))
		>> cma->order_per_bit;
}
static unsigned long cma_bitmap_pages_to_bits(const struct cma *cma,
					      unsigned long pages)
{
	return ALIGN(pages, 1UL << cma->order_per_bit) >> cma->order_per_bit;
}
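Each bitmap bit covers 2^order_per_bit pages, and the three helpers above convert between page counts, pfn alignment, and bit positions. To make that concrete, a minimal userspace sketch of the same arithmetic (not kernel code; order_per_bit, base_pfn, and the request size below are made-up values):

#include <stdio.h>

#define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
	unsigned long order_per_bit = 2;	/* one bit = 4 pages */
	unsigned long base_pfn = 0x1234;	/* area start, in pages */
	unsigned long pages = 10;		/* requested pages */
	unsigned long align_order = 4;		/* want 16-page alignment */

	/* cma_bitmap_aligned_mask: spacing between candidate slots */
	unsigned long mask = (align_order <= order_per_bit) ? 0 :
			     (1UL << (align_order - order_per_bit)) - 1;
	/* cma_bitmap_aligned_offset: correction for an unaligned base_pfn */
	unsigned long offset = (base_pfn & ((1UL << align_order) - 1))
			       >> order_per_bit;
	/* cma_bitmap_pages_to_bits: pages rounded up to whole bits */
	unsigned long bits = ALIGN(pages, 1UL << order_per_bit)
			     >> order_per_bit;

	/* Prints "mask=3 offset=1 bits=3": a 10-page request occupies
	 * 3 bits (12 pages) and may only start every 4th bit, shifted by
	 * 1 bit because base_pfn is 4 pages past a 16-page boundary. */
	printf("mask=%lu offset=%lu bits=%lu\n", mask, offset, bits);
	return 0;
}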
static void cma_clear_bitmap(struct cma *cma, unsigned long pfn,
			     unsigned int count)
{
	unsigned long bitmap_no, bitmap_count;

	bitmap_no = (pfn - cma->base_pfn) >> cma->order_per_bit;
	bitmap_count = cma_bitmap_pages_to_bits(cma, count);

	mutex_lock(&cma->lock);
	bitmap_clear(cma->bitmap, bitmap_no, bitmap_count);
	mutex_unlock(&cma->lock);
}
static void __init cma_activate_area(struct cma *cma)
{
	unsigned long base_pfn = cma->base_pfn, pfn = base_pfn;
	unsigned i = cma->count >> pageblock_order;

	cma->bitmap = bitmap_zalloc(cma_bitmap_maxno(cma), GFP_KERNEL);
	if (!cma->bitmap)
		goto out_error;

	/*
	 * ... per-pageblock loop elided: every pfn is checked so the whole
	 * reservation sits in one zone (the elided comment keeps things
	 * "simple by forcing the entire CMA resv range" into that zone);
	 * a mismatch jumps to not_in_zone ...
	 */

	mutex_init(&cma->lock);

#ifdef CONFIG_CMA_DEBUGFS
	INIT_HLIST_HEAD(&cma->mem_head);
	spin_lock_init(&cma->mem_head_lock);
#endif
	return;

not_in_zone:
	bitmap_free(cma->bitmap);
out_error:
	cma->count = 0;
	pr_err("CMA area %s could not be activated\n", cma->name);
}
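The matched lines never show who calls cma_activate_area(). In mainline it is driven by an early initcall elsewhere in mm/cma.c that walks the cma_areas[] table; a from-memory sketch (the exact shape varies by kernel version):

static int __init cma_init_reserved_areas(void)
{
	int i;

	/* Activate every area registered during early boot. */
	for (i = 0; i < cma_area_count; i++)
		cma_activate_area(&cma_areas[i]);

	return 0;
}
core_initcall(cma_init_reserved_areas);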
/* ...
 * @res_cma: Pointer to store the created cma region.
 */
int __init cma_init_reserved_mem(phys_addr_t base, phys_addr_t size,
				 unsigned int order_per_bit,
				 const char *name, struct cma **res_cma)
{
	struct cma *cma;

	if (cma_area_count == ARRAY_SIZE(cma_areas)) {
		pr_err("Not enough slots for CMA reserved regions!\n");
		return -ENOSPC;
	}

	/* ... alignment and size sanity checks elided ... */

	cma = &cma_areas[cma_area_count];
	if (name)
		snprintf(cma->name, CMA_MAX_NAME, "%s", name);
	else
		snprintf(cma->name, CMA_MAX_NAME, "cma%d", cma_area_count);

	cma->base_pfn = PFN_DOWN(base);
	cma->count = size >> PAGE_SHIFT;
	cma->order_per_bit = order_per_bit;
	*res_cma = cma;
	/* ... area count and totalcma_pages bookkeeping elided ... */
	return 0;
}
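A hedged sketch of a typical caller: handing CMA a physical range that firmware or the device tree has already reserved. The base address, size, name, and function are all illustrative, not taken from the matched lines:

/* Illustrative: register an already-reserved carveout with CMA. */
static struct cma *fw_cma;

static int __init fw_carveout_init(void)
{
	phys_addr_t base = 0x80000000;	/* assumed firmware carveout */
	phys_addr_t size = SZ_32M;

	/* order_per_bit = 0: track the area one page per bitmap bit */
	return cma_init_reserved_mem(base, size, 0, "fw-carveout", &fw_cma);
}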
/* ...
 * @alignment: Alignment for the CMA area, should be power of 2 or zero
 * @res_cma: Pointer to store the created cma region.
 * ...
 */
int __init cma_declare_contiguous_nid(phys_addr_t base,
			phys_addr_t size, phys_addr_t limit,
			phys_addr_t alignment, unsigned int order_per_bit,
			bool fixed, const char *name, struct cma **res_cma,
			int nid)
{
	/* ... */
	if (cma_area_count == ARRAY_SIZE(cma_areas)) {
		pr_err("Not enough slots for CMA reserved regions!\n");
		return -ENOSPC;
	}
	/*
	 * ... memblock reservation logic elided; a comment there begins
	 * "Pages both ends in CMA area could be merged into adjacent
	 * unmovable" (rest of comment elided) ...
	 */
	/* ... */
}
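For context, a hedged sketch of how an early-boot user might reserve an area through this path. The size, name, and surrounding function are illustrative assumptions, not from the matched lines:

/* Hypothetical: carve out 64 MiB named "camera" on any NUMA node. */
static struct cma *camera_cma;

static void __init camera_reserve(void)
{
	int ret = cma_declare_contiguous_nid(0, SZ_64M, 0, 0, 0,
					     false, "camera", &camera_cma,
					     NUMA_NO_NODE);
	if (ret)
		pr_warn("camera: CMA reservation failed: %d\n", ret);
}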
#ifdef CONFIG_CMA_DEBUG
static void cma_debug_show_areas(struct cma *cma)
{
	unsigned long next_zero_bit, next_set_bit, nr_zero;
	unsigned long start = 0, nr_part, nr_total = 0;
	unsigned long nbits = cma_bitmap_maxno(cma);

	mutex_lock(&cma->lock);
	/* Walk runs of zero bits, printing one "<pages>@<bit>" per free run. */
	for (;;) {
		next_zero_bit = find_next_zero_bit(cma->bitmap, nbits, start);
		if (next_zero_bit >= nbits)
			break;
		next_set_bit = find_next_bit(cma->bitmap, nbits, next_zero_bit);
		nr_zero = next_set_bit - next_zero_bit;
		nr_part = nr_zero << cma->order_per_bit;
		/* ... pr_cont() for this run elided ... */
		nr_total += nr_part;
		start = next_zero_bit + nr_zero;
	}
	pr_cont("=> %lu free of %lu total pages\n", nr_total, cma->count);
	mutex_unlock(&cma->lock);
}
#else
static inline void cma_debug_show_areas(struct cma *cma) { }
#endif
/* ...
 * @cma: Contiguous memory region for which the allocation is performed.
 * ...
 */
struct page *cma_alloc(struct cma *cma, size_t count, unsigned int align,
		       bool no_warn)
{
	/* ... locals elided ... */

	if (!cma || !cma->count || !cma->bitmap)
		return NULL;

	pr_debug("%s(cma %p, count %zu, align %d)\n", __func__, (void *)cma,
		 count, align);

	mask = cma_bitmap_aligned_mask(cma, align);
	offset = cma_bitmap_aligned_offset(cma, align);
	bitmap_maxno = cma_bitmap_maxno(cma);
	bitmap_count = cma_bitmap_pages_to_bits(cma, count);

	for (;;) {
		mutex_lock(&cma->lock);
		bitmap_no = bitmap_find_next_zero_area_off(cma->bitmap,
				bitmap_maxno, start, bitmap_count, mask,
				offset);
		if (bitmap_no >= bitmap_maxno) {
			mutex_unlock(&cma->lock);
			break;
		}
		/* Mark the range ours before dropping the lock. */
		bitmap_set(cma->bitmap, bitmap_no, bitmap_count);
		mutex_unlock(&cma->lock);

		pfn = cma->base_pfn + (bitmap_no << cma->order_per_bit);
		/* ... alloc_contig_range() elided; on failure the bits are
		 * cleared and the search retried past this slot: */
		cma_clear_bitmap(cma, pfn, count);
	}

	/*
	 * CMA can allocate multiple page blocks, which results in different
	 * blocks being marked with different tags ... (tag reset elided)
	 */

	/* ... on failure, unless no_warn: ... */
	cma_debug_show_areas(cma);
	/* ... returns the first struct page, or NULL ... */
}
/* ...
 * @cma: Contiguous memory region for which the allocation is performed.
 * ...
 */
bool cma_release(struct cma *cma, const struct page *pages, unsigned int count)
{
	unsigned long pfn;

	if (!cma || !pages)
		return false;

	pfn = page_to_pfn(pages);
	if (pfn < cma->base_pfn || pfn >= cma->base_pfn + cma->count)
		return false;

	VM_BUG_ON(pfn + count > cma->base_pfn + cma->count);

	/* ... free_contig_range() elided ... */
	cma_clear_bitmap(cma, pfn, count);
	return true;
}
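Taken together, a hedged usage sketch of the allocate/release pair from a driver's point of view. camera_cma is the hypothetical area declared earlier; the count and alignment are illustrative:

/* Illustrative: grab 16 contiguous pages (order-4 aligned), then return them. */
static int camera_grab_buffer(void)
{
	struct page *page;

	page = cma_alloc(camera_cma, 16, 4, false);
	if (!page)
		return -ENOMEM;

	/* ... map the pages and point the device at them ... */

	if (!cma_release(camera_cma, page, 16))
		pr_warn("camera: pages were not from our CMA area\n");
	return 0;
}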
int cma_for_each_area(int (*it)(struct cma *cma, void *data), void *data)
{ /* ... walks cma_areas[], stopping at the first nonzero return ... */ }
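Finally, a hedged example of an iterator callback; the function names and the totalling logic are illustrative, not from the source:

/* Illustrative: sum the sizes of every registered CMA area, in pages. */
static int count_cma_pages(struct cma *cma, void *data)
{
	unsigned long *total = data;

	*total += cma_get_size(cma) >> PAGE_SHIFT;
	return 0;	/* zero: keep iterating */
}

static void report_cma_total(void)
{
	unsigned long total = 0;

	cma_for_each_area(count_cma_pages, &total);
	pr_info("cma: %lu pages reserved in total\n", total);
}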