Lines matching "region" (full-text search) in the Zephyr device memory management (DMM) subsystem:

Member of struct dmm_heap:

    const struct dmm_region *region;

In dmm_heap_find() ("region" is an argument):

    static struct dmm_heap *dmm_heap_find(void *region)
    ...
        if (dh->region->dt_addr == (uintptr_t)region) {

In is_region_cacheable():

    static bool is_region_cacheable(const struct dmm_region *region)
    {
        return (IS_ENABLED(CONFIG_DCACHE) && (region->dt_attr & DT_MEM_CACHEABLE));
    }

In is_user_buffer_correctly_preallocated() ("region" is an argument):

    static bool is_user_buffer_correctly_preallocated(/* ... */,
                                                      const struct dmm_region *region)
    ...
        if (!is_buffer_within_region(addr, user_length, region->dt_addr, region->dt_size)) {
    ...
        if (!is_region_cacheable(region)) {
            /* The buffer is contained within a non-cacheable region - use it as it is. */
    ...
        if (IS_ALIGNED(addr, region->dt_align)) {
            /* A buffer in a cacheable region must be aligned to the data cache line size. */
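
The bounds check above calls a helper that is not among these matches. A minimal sketch of what it plausibly looks like, with parameter names inferred from the call site (an assumption, not the file's code):

    /* Assumed shape of the helper used above; not among the matched lines. */
    static bool is_buffer_within_region(uintptr_t start, size_t length,
                                        uintptr_t reg_start, size_t reg_size)
    {
        /* The buffer fits iff it starts inside the region and ends before its end. */
        return (start >= reg_start) && ((start + length) <= (reg_start + reg_size));
    }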

In dmm_heap_start_get():

        return ROUND_UP(dh->region->dt_allc, dh->region->dt_align);

In dmm_heap_size_get():

        return (dh->region->dt_size - (dmm_heap_start_get(dh) - dh->region->dt_addr));

In dmm_buffer_alloc():

        length = ROUND_UP(length, dh->region->dt_align);
        return sys_heap_aligned_alloc(&dh->heap, dh->region->dt_align, length);
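
Both the length and the start address of an allocation are rounded to dt_align, so no two allocations ever share a data cache line, and cache maintenance on one buffer cannot clobber its neighbour. Worked numbers, assuming a 32-byte cache line (the value is illustrative, not from the source):

    /* Illustrative arithmetic only: dt_align assumed to be 32 bytes. */
    size_t length = ROUND_UP(20, 32);                  /* 20-byte request -> 32 bytes */
    void *buf = sys_heap_aligned_alloc(&dh->heap, 32, length);
    /* buf is 32-byte aligned and spans exactly one cache line, so a flush or
     * invalidate over it cannot touch data owned by another allocation.
     */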

In dmm_buffer_out_prepare():

    int dmm_buffer_out_prepare(void *region, void const *user_buffer, size_t user_length,
                               void **buffer_out)
    ...
        /* Get the memory region from which the specified device can perform DMA transfers. */
        dh = dmm_heap_find(region);
    ...
        /* ...
         * - the provided user buffer is already in the correct memory region,
         * ...
         *   if it is located in a cacheable region.
         */
        if (is_user_buffer_correctly_preallocated(user_buffer, user_length, dh->region)) {
    ...
        /* ...
         * - dynamically allocate a buffer in the correct memory region that respects cache line
         * ...
         */
    ...
        /* Check if the device memory region is cacheable ... */
        if (is_region_cacheable(dh->region)) {
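
When the region is cacheable, the prepared output buffer must be written back to memory before the device DMA-reads it. A plausible body for that last branch, using Zephyr's generic data-cache API (an assumption: the matches do not show the branch body):

    if (is_region_cacheable(dh->region)) {
        /* Write dirty cache lines back so the DMA engine reads current data. */
        sys_cache_data_flush_range(*buffer_out, user_length);
    }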

In dmm_buffer_out_release():

    int dmm_buffer_out_release(void *region, void *buffer_out)
    ...
        /* Get the memory region from which the specified device can perform DMA transfers. */
        dh = dmm_heap_find(region);
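
Putting the output pair together, a driver's transmit path would look roughly as follows. This is a hypothetical sketch: uart_dev_region and the payload are placeholders, and the DMA start/wait step is elided.

    /* Hypothetical TX path built on the two calls above. */
    static const uint8_t payload[] = { 0x01, 0x02, 0x03 };
    void *dma_buf;

    int err = dmm_buffer_out_prepare(uart_dev_region, payload, sizeof(payload), &dma_buf);
    if (err == 0) {
        /* ... start the DMA transfer from dma_buf and wait for completion ... */
        dmm_buffer_out_release(uart_dev_region, dma_buf);
    }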

In dmm_buffer_in_prepare():

    int dmm_buffer_in_prepare(void *region, void *user_buffer, size_t user_length, void **buffer_in)
    ...
        /* Get the memory region to which the specified device can perform DMA transfers. */
        dh = dmm_heap_find(region);
    ...
        /* ...
         * - the provided user buffer is already in the correct memory region,
         * ...
         *   if it is located in a cacheable region.
         */
        if (is_user_buffer_correctly_preallocated(user_buffer, user_length, dh->region)) {
    ...
        /* If not, dynamically allocate a buffer in the correct memory region that respects cache
         * ...
         */
    ...
        /* Check if the device memory region is cacheable ... */
        if (is_region_cacheable(dh->region)) {
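
For input buffers the cache concern is mirrored: cached lines over the buffer must be invalidated so the CPU later reads what the device wrote rather than stale cache contents. A plausible branch body, again assuming Zephyr's generic data-cache API (the matches do not show it):

    if (is_region_cacheable(dh->region)) {
        /* Drop cached lines so subsequent CPU reads fetch the DMA'd data. */
        sys_cache_data_invd_range(*buffer_in, user_length);
    }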

In dmm_buffer_in_release():

    int dmm_buffer_in_release(void *region, void *user_buffer, size_t user_length, void *buffer_in)
    ...
        /* Get the memory region to which the specified device can perform DMA transfers,
         * using devicetree.
         */
        dh = dmm_heap_find(region);
    ...
        /* Check if the device memory region is cacheable ... */
        if (is_region_cacheable(dh->region)) {
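
A receive path pairs the two input calls around the transfer. Another hypothetical sketch: spi_dev_region and the buffer size are made up, and the release step is where a bounce buffer, if one was allocated, would be copied back into the user buffer.

    /* Hypothetical RX path built on the two calls above. */
    uint8_t rx_data[64];
    void *dma_buf;

    int err = dmm_buffer_in_prepare(spi_dev_region, rx_data, sizeof(rx_data), &dma_buf);
    if (err == 0) {
        /* ... point the DMA engine at dma_buf and wait for the transfer ... */
        dmm_buffer_in_release(spi_dev_region, rx_data, sizeof(rx_data), dma_buf);
    }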

In dmm_init():

        dh->region = &dmm_regions[idx];
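
Initialization binds each heap to its devicetree-defined region and builds a sys_heap over whatever the region has left past its statically allocated prefix. A rough sketch of that wiring, inferred from the helpers above rather than copied from the file (dmm_heaps is an assumed array name):

    for (size_t idx = 0; idx < ARRAY_SIZE(dmm_regions); idx++) {
        struct dmm_heap *dh = &dmm_heaps[idx];   /* assumed per-region heap array */

        dh->region = &dmm_regions[idx];
        /* The allocatable window starts at dt_allc rounded up to dt_align
         * (dmm_heap_start_get) and runs to the end of the region
         * (dmm_heap_size_get).
         */
        sys_heap_init(&dh->heap, (void *)dmm_heap_start_get(dh), dmm_heap_size_get(dh));
    }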