Lines Matching refs:cma (mm/cma.c)

Each entry below shows the source line number, the matching line, and the enclosing function; the trailing "argument"/"local" notes how the cma identifier is used on that line.

43 struct cma cma_areas[MAX_CMA_AREAS];
47 phys_addr_t cma_get_base(const struct cma *cma) in cma_get_base() argument
49 return PFN_PHYS(cma->base_pfn); in cma_get_base()
52 unsigned long cma_get_size(const struct cma *cma) in cma_get_size() argument
54 return cma->count << PAGE_SHIFT; in cma_get_size()
57 const char *cma_get_name(const struct cma *cma) in cma_get_name() argument
59 return cma->name ? cma->name : "(undefined)"; in cma_get_name()
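
The three accessors above only convert the area's stored page-frame fields into caller-friendly units: PFN_PHYS() shifts base_pfn left by PAGE_SHIFT to get a physical address, and count << PAGE_SHIFT turns the page count into bytes. A minimal userspace model of that arithmetic (the 4 KiB page size and the example values are assumptions, not taken from the listing):

    #include <stdio.h>
    #include <stdint.h>

    #define MODEL_PAGE_SHIFT 12UL                     /* assume 4 KiB pages */
    #define MODEL_PFN_PHYS(pfn) ((uint64_t)(pfn) << MODEL_PAGE_SHIFT)

    struct model_cma {
        unsigned long base_pfn;   /* first page frame of the area */
        unsigned long count;      /* size of the area in pages */
    };

    int main(void)
    {
        struct model_cma cma = { .base_pfn = 0x80000, .count = 4096 };

        /* cma_get_base(): pfn -> physical address */
        printf("base = %#llx\n", (unsigned long long)MODEL_PFN_PHYS(cma.base_pfn));
        /* cma_get_size(): pages -> bytes */
        printf("size = %lu bytes\n", cma.count << MODEL_PAGE_SHIFT);
        return 0;
    }
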
62 static unsigned long cma_bitmap_aligned_mask(const struct cma *cma, in cma_bitmap_aligned_mask() argument
65 if (align_order <= cma->order_per_bit) in cma_bitmap_aligned_mask()
67 return (1UL << (align_order - cma->order_per_bit)) - 1; in cma_bitmap_aligned_mask()
74 static unsigned long cma_bitmap_aligned_offset(const struct cma *cma, in cma_bitmap_aligned_offset() argument
77 return (cma->base_pfn & ((1UL << align_order) - 1)) in cma_bitmap_aligned_offset()
78 >> cma->order_per_bit; in cma_bitmap_aligned_offset()
81 static unsigned long cma_bitmap_pages_to_bits(const struct cma *cma, in cma_bitmap_pages_to_bits() argument
84 return ALIGN(pages, 1UL << cma->order_per_bit) >> cma->order_per_bit; in cma_bitmap_pages_to_bits()
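
All of the bitmap helpers work in units of 2^order_per_bit pages per bit: cma_bitmap_aligned_mask() builds the mask of low bitmap-index bits an aligned start must keep clear, cma_bitmap_aligned_offset() compensates for a base_pfn that is not itself aligned to the requested order, and cma_bitmap_pages_to_bits() rounds a page count up to whole bits. A standalone sketch of the same arithmetic (the structure name and example values are made up):

    #include <stdio.h>

    struct model_cma {
        unsigned long base_pfn;
        unsigned int order_per_bit;   /* pages per bit = 1 << order_per_bit */
    };

    /* mask of bitmap-index bits that must be zero for an aligned start */
    static unsigned long aligned_mask(const struct model_cma *cma, unsigned int align_order)
    {
        if (align_order <= cma->order_per_bit)
            return 0;
        return (1UL << (align_order - cma->order_per_bit)) - 1;
    }

    /* bitmap offset of base_pfn relative to the requested alignment */
    static unsigned long aligned_offset(const struct model_cma *cma, unsigned int align_order)
    {
        return (cma->base_pfn & ((1UL << align_order) - 1)) >> cma->order_per_bit;
    }

    /* round a page count up to whole bitmap bits */
    static unsigned long pages_to_bits(const struct model_cma *cma, unsigned long pages)
    {
        unsigned long per_bit = 1UL << cma->order_per_bit;
        return (pages + per_bit - 1) / per_bit;
    }

    int main(void)
    {
        struct model_cma cma = { .base_pfn = 0x80018, .order_per_bit = 2 };

        printf("mask(order 4)   = %lu\n", aligned_mask(&cma, 4));    /* 3 */
        printf("offset(order 4) = %lu\n", aligned_offset(&cma, 4));  /* 2: (0x18 & 0xf) >> 2 */
        printf("bits(10 pages)  = %lu\n", pages_to_bits(&cma, 10));  /* 3: rounded up */
        return 0;
    }
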
87 static void cma_clear_bitmap(struct cma *cma, unsigned long pfn, in cma_clear_bitmap() argument
92 bitmap_no = (pfn - cma->base_pfn) >> cma->order_per_bit; in cma_clear_bitmap()
93 bitmap_count = cma_bitmap_pages_to_bits(cma, count); in cma_clear_bitmap()
95 mutex_lock(&cma->lock); in cma_clear_bitmap()
96 bitmap_clear(cma->bitmap, bitmap_no, bitmap_count); in cma_clear_bitmap()
97 mutex_unlock(&cma->lock); in cma_clear_bitmap()
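
cma_clear_bitmap() converts a page-frame range back into bitmap units (the same order_per_bit scaling as above) and clears it under cma->lock so it cannot race with the bitmap search in cma_alloc(). A userspace sketch of that bookkeeping, with a pthread mutex standing in for the kernel mutex and a hand-rolled bit clear in place of bitmap_clear():

    #include <limits.h>
    #include <pthread.h>
    #include <stdio.h>

    #define BITS_PER_LONG (sizeof(unsigned long) * CHAR_BIT)

    static unsigned long bitmap[4];              /* one bit per 2^order_per_bit pages */
    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

    static void clear_range(unsigned long base_pfn, unsigned int order_per_bit,
                            unsigned long pfn, unsigned long count)
    {
        unsigned long bitmap_no = (pfn - base_pfn) >> order_per_bit;
        unsigned long bitmap_count =
            (count + (1UL << order_per_bit) - 1) >> order_per_bit;
        unsigned long i;

        pthread_mutex_lock(&lock);               /* kernel: mutex_lock(&cma->lock) */
        for (i = bitmap_no; i < bitmap_no + bitmap_count; i++)
            bitmap[i / BITS_PER_LONG] &= ~(1UL << (i % BITS_PER_LONG));
        pthread_mutex_unlock(&lock);
    }

    int main(void)
    {
        bitmap[0] = ~0UL;                        /* pretend everything is allocated */
        clear_range(0x80000, 0, 0x80004, 8);     /* free pages 4..11 of the area */
        printf("word0 = %#lx\n", bitmap[0]);     /* bits 4..11 are now clear */
        return 0;
    }
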
100 static int __init cma_activate_area(struct cma *cma) in cma_activate_area() argument
102 int bitmap_size = BITS_TO_LONGS(cma_bitmap_maxno(cma)) * sizeof(long); in cma_activate_area()
103 unsigned long base_pfn = cma->base_pfn, pfn = base_pfn; in cma_activate_area()
104 unsigned i = cma->count >> pageblock_order; in cma_activate_area()
107 cma->bitmap = kzalloc(bitmap_size, GFP_KERNEL); in cma_activate_area()
109 if (!cma->bitmap) in cma_activate_area()
133 mutex_init(&cma->lock); in cma_activate_area()
136 INIT_HLIST_HEAD(&cma->mem_head); in cma_activate_area()
137 spin_lock_init(&cma->mem_head_lock); in cma_activate_area()
143 pr_err("CMA area %s could not be activated\n", cma->name); in cma_activate_area()
144 kfree(cma->bitmap); in cma_activate_area()
145 cma->count = 0; in cma_activate_area()
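
cma_activate_area() sizes the per-area bitmap from the number of bits the area needs (cma_bitmap_maxno() is count >> order_per_bit), rounds that up to whole longs with BITS_TO_LONGS(), kzalloc()s it, then initialises cma->lock and, when debugfs support is built in, the mem_head list; if activation fails, the bitmap is freed and count is zeroed so the area is effectively disabled. A quick userspace check of the sizing arithmetic (the 4 KiB page size and the 16 MiB example area are assumptions):

    #include <limits.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define BITS_PER_LONG    (sizeof(unsigned long) * CHAR_BIT)
    #define BITS_TO_LONGS(n) (((n) + BITS_PER_LONG - 1) / BITS_PER_LONG)

    int main(void)
    {
        unsigned long count = 16UL << (20 - 12);  /* 16 MiB area, 4 KiB pages */
        unsigned int order_per_bit = 0;           /* one bit per page */
        unsigned long maxno = count >> order_per_bit;             /* cma_bitmap_maxno() */
        size_t bitmap_size = BITS_TO_LONGS(maxno) * sizeof(long);

        unsigned long *bitmap = calloc(1, bitmap_size);           /* kzalloc() stand-in */
        if (!bitmap)
            return 1;

        printf("%lu pages -> %lu bits -> %zu bytes of bitmap\n",
               count, maxno, bitmap_size);
        free(bitmap);
        return 0;
    }
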
179 struct cma **res_cma) in cma_init_reserved_mem()
181 struct cma *cma; in cma_init_reserved_mem() local
208 cma = &cma_areas[cma_area_count]; in cma_init_reserved_mem()
210 cma->name = name; in cma_init_reserved_mem()
212 cma->name = kasprintf(GFP_KERNEL, "cma%d\n", cma_area_count); in cma_init_reserved_mem()
213 if (!cma->name) in cma_init_reserved_mem()
216 cma->base_pfn = PFN_DOWN(base); in cma_init_reserved_mem()
217 cma->count = size >> PAGE_SHIFT; in cma_init_reserved_mem()
218 cma->order_per_bit = order_per_bit; in cma_init_reserved_mem()
219 *res_cma = cma; in cma_init_reserved_mem()
248 bool fixed, const char *name, struct cma **res_cma) in cma_declare_contiguous()
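
cma_declare_contiguous() is the early-boot entry point that reserves the physical range (via memblock) and then hands it to cma_init_reserved_mem() above to fill in base_pfn, count, order_per_bit and the name. A hedged sketch of a caller, assuming a kernel build environment; the area and function names are hypothetical, and real callers live in arch setup code and the DMA/contiguous glue, before the page allocator takes over from memblock:

    #include <linux/cma.h>
    #include <linux/init.h>
    #include <linux/sizes.h>

    static struct cma *my_cma;    /* hypothetical driver-private area */

    /* Must run early, while memblock still owns memory (e.g. from arch setup). */
    static int __init my_cma_reserve(void)
    {
        return cma_declare_contiguous(0,         /* base: no preferred address */
                                      SZ_16M,    /* size of the reservation */
                                      0,         /* limit: no explicit upper bound */
                                      0,         /* alignment: use the CMA default */
                                      0,         /* order_per_bit: one page per bit */
                                      false,     /* fixed: base is only a hint */
                                      "my_cma", &my_cma);
    }
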
368 static void cma_debug_show_areas(struct cma *cma) in cma_debug_show_areas() argument
374 mutex_lock(&cma->lock); in cma_debug_show_areas()
377 next_zero_bit = find_next_zero_bit(cma->bitmap, cma->count, start); in cma_debug_show_areas()
378 if (next_zero_bit >= cma->count) in cma_debug_show_areas()
380 next_set_bit = find_next_bit(cma->bitmap, cma->count, next_zero_bit); in cma_debug_show_areas()
386 pr_cont("=> %u free of %lu total pages\n", nr_total, cma->count); in cma_debug_show_areas()
387 mutex_unlock(&cma->lock); in cma_debug_show_areas()
390 static inline void cma_debug_show_areas(struct cma *cma) { } in cma_debug_show_areas() argument
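
When CMA debugging is built in, cma_debug_show_areas() takes cma->lock and alternates between find_next_zero_bit() and find_next_bit() to print each free run and a running total; otherwise the empty stub above is used. A standalone model of the same scan over a plain array bitmap (the helper names here are local, not the kernel's):

    #include <limits.h>
    #include <stdio.h>

    #define BITS_PER_LONG (sizeof(unsigned long) * CHAR_BIT)

    static int test_one_bit(const unsigned long *map, unsigned long bit)
    {
        return (map[bit / BITS_PER_LONG] >> (bit % BITS_PER_LONG)) & 1;
    }

    /* print every run of zero bits in map[0..nbits), like cma_debug_show_areas() */
    static void show_free_runs(const unsigned long *map, unsigned long nbits)
    {
        unsigned long start = 0, nr_total = 0;

        while (start < nbits) {
            unsigned long zero, set;

            for (zero = start; zero < nbits && test_one_bit(map, zero); zero++)
                ;                                /* find_next_zero_bit() */
            if (zero >= nbits)
                break;
            for (set = zero; set < nbits && !test_one_bit(map, set); set++)
                ;                                /* find_next_bit() */
            printf("%lu free bits at %lu\n", set - zero, zero);
            nr_total += set - zero;
            start = set;
        }
        printf("=> %lu free of %lu total bits\n", nr_total, nbits);
    }

    int main(void)
    {
        unsigned long map[1] = { 0xF0F0UL };     /* bits 4..7 and 12..15 allocated */
        show_free_runs(map, 16);
        return 0;
    }
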
403 struct page *cma_alloc(struct cma *cma, size_t count, unsigned int align, in cma_alloc() argument
413 if (!cma || !cma->count) in cma_alloc()
416 pr_debug("%s(cma %p, count %zu, align %d)\n", __func__, (void *)cma, in cma_alloc()
422 mask = cma_bitmap_aligned_mask(cma, align); in cma_alloc()
423 offset = cma_bitmap_aligned_offset(cma, align); in cma_alloc()
424 bitmap_maxno = cma_bitmap_maxno(cma); in cma_alloc()
425 bitmap_count = cma_bitmap_pages_to_bits(cma, count); in cma_alloc()
431 mutex_lock(&cma->lock); in cma_alloc()
432 bitmap_no = bitmap_find_next_zero_area_off(cma->bitmap, in cma_alloc()
436 mutex_unlock(&cma->lock); in cma_alloc()
439 bitmap_set(cma->bitmap, bitmap_no, bitmap_count); in cma_alloc()
445 mutex_unlock(&cma->lock); in cma_alloc()
447 pfn = cma->base_pfn + (bitmap_no << cma->order_per_bit); in cma_alloc()
457 cma_clear_bitmap(cma, pfn, count); in cma_alloc()
472 cma_debug_show_areas(cma); in cma_alloc()
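
cma_alloc() converts the request into bitmap units, then loops: under cma->lock it searches for a zero run whose start satisfies the alignment mask and offset, marks the run used, drops the lock, and only then tries to grab the underlying pages, clearing the bits again and retrying further along on failure, with the debug dump above printed once every attempt is exhausted. A userspace model of just the aligned search-and-set step; the actual page migration and allocation are omitted, and the helper names are local inventions:

    #include <limits.h>
    #include <stdio.h>

    #define BITS_PER_LONG (sizeof(unsigned long) * CHAR_BIT)

    static int model_test_bit(const unsigned long *map, unsigned long bit)
    {
        return (map[bit / BITS_PER_LONG] >> (bit % BITS_PER_LONG)) & 1;
    }

    static void model_set_bit(unsigned long *map, unsigned long bit)
    {
        map[bit / BITS_PER_LONG] |= 1UL << (bit % BITS_PER_LONG);
    }

    /*
     * Find a run of "count" zero bits whose start satisfies
     * (start + offset) & mask == 0, set it, and return its index
     * (the role bitmap_find_next_zero_area_off() plus bitmap_set()
     * play in cma_alloc()).  Returns maxno if nothing fits.
     */
    static unsigned long find_and_set(unsigned long *map, unsigned long maxno,
                                      unsigned long count, unsigned long mask,
                                      unsigned long offset)
    {
        unsigned long start, i;

        for (start = 0; start + count <= maxno; start++) {
            if ((start + offset) & mask)
                continue;                        /* start not aligned */
            for (i = 0; i < count && !model_test_bit(map, start + i); i++)
                ;
            if (i == count) {                    /* run is free: claim it */
                for (i = 0; i < count; i++)
                    model_set_bit(map, start + i);
                return start;
            }
        }
        return maxno;
    }

    int main(void)
    {
        unsigned long map[1] = { 0x1UL };        /* bit 0 already allocated */

        /* ask for 4 bits aligned to 4 (mask 3), no base offset */
        unsigned long no = find_and_set(map, 64, 4, 3, 0);
        printf("got run at bit %lu, map = %#lx\n", no, map[0]);
        return 0;
    }
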
489 bool cma_release(struct cma *cma, const struct page *pages, unsigned int count) in cma_release() argument
493 if (!cma || !pages) in cma_release()
500 if (pfn < cma->base_pfn || pfn >= cma->base_pfn + cma->count) in cma_release()
503 VM_BUG_ON(pfn + count > cma->base_pfn + cma->count); in cma_release()
506 cma_clear_bitmap(cma, pfn, count); in cma_release()
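
cma_release() validates that the pages lie inside the area's pfn range, returns them to the page allocator, and clears the matching bitmap bits so cma_alloc() can hand them out again. A hedged usage sketch, assuming a kernel build environment and an already-activated area such as "my_cma" from the reservation sketch above; note that the fourth cma_alloc() argument is a no-warn flag on recent kernels, while older kernels expect a gfp_mask in that position:

    #include <linux/cma.h>
    #include <linux/mm.h>

    /* Hypothetical wrappers; "nr_pages" must match between get and put. */
    static struct page *my_cma_get_buffer(struct cma *cma, size_t nr_pages)
    {
        /* align = 0: no alignment beyond a single page */
        return cma_alloc(cma, nr_pages, 0, false);
    }

    static void my_cma_put_buffer(struct cma *cma, struct page *pages,
                                  unsigned int nr_pages)
    {
        if (pages)
            cma_release(cma, pages, nr_pages);
    }
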
512 int cma_for_each_area(int (*it)(struct cma *cma, void *data), void *data) in cma_for_each_area() argument
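
cma_for_each_area() walks every registered area and calls the callback until it returns non-zero, which is handy for auditing or exporting all areas at once. A small sketch of a caller, assuming a kernel build environment; the callback and function names are made up, and only cma_get_name()/cma_get_size() from the listing above are used:

    #include <linux/cma.h>
    #include <linux/printk.h>

    static int my_cma_count_one(struct cma *cma, void *data)
    {
        unsigned long *total = data;

        *total += cma_get_size(cma);
        pr_info("CMA area %s: %lu bytes\n", cma_get_name(cma), cma_get_size(cma));
        return 0;                 /* non-zero would stop the iteration */
    }

    static void my_cma_summary(void)
    {
        unsigned long total = 0;

        cma_for_each_area(my_cma_count_one, &total);
        pr_info("CMA total: %lu bytes\n", total);
    }
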