Lines Matching +full:memory +full:- +full:region
3 * SPDX-License-Identifier: Apache-2.0
30 * stored in memory sections spanning memory regions.
31 * These are used to determine the memory left for the dynamic bounce buffer allocator to work with.
46 const struct dmm_region *region; member
57 static struct dmm_heap *dmm_heap_find(void *region) in dmm_heap_find() argument
63 if (dh->region->dt_addr == (uintptr_t)region) { in dmm_heap_find()
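The lookup above resolves a caller-supplied region pointer to its managing heap by comparing it against each region's devicetree address. A minimal sketch of that loop, assuming a dmm_heaps[] table of struct dmm_heap entries like the one implied by the fragments (the table name and the ARRAY_SIZE bound are assumptions):

/* Hypothetical sketch of the region-to-heap lookup; dmm_heaps[] is assumed. */
static struct dmm_heap *find_heap_for_region(void *region)
{
	for (size_t i = 0; i < ARRAY_SIZE(dmm_heaps); i++) {
		struct dmm_heap *dh = &dmm_heaps[i];

		/* The region argument is the devicetree address of a
		 * memory-region node, so the match is on dt_addr.
		 */
		if (dh->region->dt_addr == (uintptr_t)region) {
			return dh;
		}
	}

	return NULL; /* unknown region; callers translate this into -EINVAL */
}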
71 static bool is_region_cacheable(const struct dmm_region *region) in is_region_cacheable() argument
73 return (IS_ENABLED(CONFIG_DCACHE) && (region->dt_attr & DT_MEM_CACHEABLE)); in is_region_cacheable()
83 const struct dmm_region *region) in is_user_buffer_correctly_preallocated() argument
87 if (!is_buffer_within_region(addr, user_length, region->dt_addr, region->dt_size)) { in is_user_buffer_correctly_preallocated()
91 if (!is_region_cacheable(region)) { in is_user_buffer_correctly_preallocated()
92 /* Buffer is contained within non-cacheable region - use it as it is. */ in is_user_buffer_correctly_preallocated()
96 if (IS_ALIGNED(addr, region->dt_align)) { in is_user_buffer_correctly_preallocated()
97 /* If buffer is in cacheable region it must be aligned to data cache line size. */ in is_user_buffer_correctly_preallocated()
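Read together, these helpers let a user buffer be used in place only when it lies entirely inside the DMA-capable region and, if that region is cacheable, starts on a cache-line boundary (dt_align). A standalone restatement of the criteria, assuming is_buffer_within_region() is a plain start/length containment check as its name suggests:

/* Hypothetical restatement of the zero-copy criteria shown above. */
static bool can_use_buffer_in_place(uintptr_t addr, size_t len,
				    const struct dmm_region *region)
{
	/* Must be fully contained in the region the device can DMA to/from. */
	if ((addr < region->dt_addr) ||
	    ((addr + len) > (region->dt_addr + region->dt_size))) {
		return false;
	}

	/* Non-cacheable regions place no alignment requirement on the buffer. */
	if (!(IS_ENABLED(CONFIG_DCACHE) && (region->dt_attr & DT_MEM_CACHEABLE))) {
		return true;
	}

	/* In a cacheable region the buffer must start on a cache-line boundary. */
	return IS_ALIGNED(addr, region->dt_align);
}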
106 return ROUND_UP(dh->region->dt_allc, dh->region->dt_align); in dmm_heap_start_get()
111 return (dh->region->dt_size - (dmm_heap_start_get(dh) - dh->region->dt_addr)); in dmm_heap_size_get()
116 length = ROUND_UP(length, dh->region->dt_align); in dmm_buffer_alloc()
117 return sys_heap_aligned_alloc(&dh->heap, dh->region->dt_align, length); in dmm_buffer_alloc()
122 sys_heap_free(&dh->heap, buffer); in dmm_buffer_free()
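The allocation helper rounds the requested length up to the region's alignment and then asks the per-region sys_heap for an aligned block, so bounce buffers occupy whole cache lines. A self-contained illustration of that pattern, with placeholder values standing in for dt_align and the carved-out heap area:

#include <zephyr/kernel.h>
#include <zephyr/sys/sys_heap.h>

/* Placeholder alignment and heap backing store (hypothetical values). */
#define REGION_ALIGN 32U
static uint8_t heap_mem[1024] __aligned(REGION_ALIGN);
static struct sys_heap bounce_heap;

void bounce_heap_setup(void)
{
	sys_heap_init(&bounce_heap, heap_mem, sizeof(heap_mem));
}

void *bounce_alloc(size_t length)
{
	/* Round up to the alignment, as dmm_buffer_alloc() does, so the block
	 * is padded to a whole number of cache lines.
	 */
	length = ROUND_UP(length, REGION_ALIGN);

	return sys_heap_aligned_alloc(&bounce_heap, REGION_ALIGN, length);
}

void bounce_free(void *buffer)
{
	sys_heap_free(&bounce_heap, buffer);
}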
125 int dmm_buffer_out_prepare(void *region, void const *user_buffer, size_t user_length, in dmm_buffer_out_prepare() argument
131 /* Assume that zero-length buffers are correct as they are. */ in dmm_buffer_out_prepare()
136 /* Get memory region that specified device can perform DMA transfers from */ in dmm_buffer_out_prepare()
137 dh = dmm_heap_find(region); in dmm_buffer_out_prepare()
139 return -EINVAL; in dmm_buffer_out_prepare()
143 * - provided user buffer is already in correct memory region, in dmm_buffer_out_prepare()
144 * - provided user buffer is aligned and padded to cache line, in dmm_buffer_out_prepare()
145 * if it is located in cacheable region. in dmm_buffer_out_prepare()
147 if (is_user_buffer_correctly_preallocated(user_buffer, user_length, dh->region)) { in dmm_buffer_out_prepare()
152 * - dynamically allocate buffer in correct memory region that respects cache line in dmm_buffer_out_prepare()
158 return -ENOMEM; in dmm_buffer_out_prepare()
160 /* - copy user buffer contents into allocated buffer */ in dmm_buffer_out_prepare()
164 /* Check if device memory region is cacheable in dmm_buffer_out_prepare()
168 if (is_region_cacheable(dh->region)) { in dmm_buffer_out_prepare()
176 int dmm_buffer_out_release(void *region, void *buffer_out) in dmm_buffer_out_release() argument
181 /* Get memory region that specified device can perform DMA transfers from */ in dmm_buffer_out_release()
182 dh = dmm_heap_find(region); in dmm_buffer_out_release()
184 return -EINVAL; in dmm_buffer_out_release()
187 /* Check if output buffer is contained within memory area in dmm_buffer_out_release()
188 * managed by dynamic memory allocator in dmm_buffer_out_release()
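In a driver's TX path the two calls bracket the transfer: dmm_buffer_out_prepare() returns a DMA-safe source pointer (the user buffer itself when it already satisfies the checks above, otherwise a freshly allocated and populated bounce buffer), and dmm_buffer_out_release() gives the bounce buffer back afterwards. A hedged usage sketch; the region pointer is assumed to come from the device's devicetree memory-region property, the dmm.h include path is assumed, and start_tx_dma() is a hypothetical placeholder for the peripheral transfer:

#include <zephyr/kernel.h>
#include <dmm.h> /* include path assumed; provides the dmm_buffer_* prototypes */

/* Placeholder for the peripheral-specific transfer (hypothetical). */
extern int start_tx_dma(void *buf, size_t len);

int send_frame(void *mem_region, const uint8_t *frame, size_t len)
{
	void *dma_buf;
	int err;

	err = dmm_buffer_out_prepare(mem_region, frame, len, &dma_buf);
	if (err < 0) {
		/* -EINVAL for an unknown region, -ENOMEM when the heap is exhausted. */
		return err;
	}

	err = start_tx_dma(dma_buf, len);

	/* Call after the transfer completes; frees the bounce buffer if one
	 * was allocated for this transfer.
	 */
	(void)dmm_buffer_out_release(mem_region, dma_buf);

	return err;
}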
199 int dmm_buffer_in_prepare(void *region, void *user_buffer, size_t user_length, void **buffer_in) in dmm_buffer_in_prepare() argument
204 /* Assume that zero-length buffers are correct as they are. */ in dmm_buffer_in_prepare()
209 /* Get memory region that specified device can perform DMA transfers to */ in dmm_buffer_in_prepare()
210 dh = dmm_heap_find(region); in dmm_buffer_in_prepare()
212 return -EINVAL; in dmm_buffer_in_prepare()
216 * - provided user buffer is already in correct memory region, in dmm_buffer_in_prepare()
217 * - provided user buffer is aligned and padded to cache line, in dmm_buffer_in_prepare()
218 * if it is located in cacheable region. in dmm_buffer_in_prepare()
220 if (is_user_buffer_correctly_preallocated(user_buffer, user_length, dh->region)) { in dmm_buffer_in_prepare()
224 /* If not, dynamically allocate buffer in correct memory region that respects cache in dmm_buffer_in_prepare()
230 return -ENOMEM; in dmm_buffer_in_prepare()
234 /* Check if device memory region is cacheable in dmm_buffer_in_prepare()
238 if (is_region_cacheable(dh->region)) { in dmm_buffer_in_prepare()
246 int dmm_buffer_in_release(void *region, void *user_buffer, size_t user_length, void *buffer_in) in dmm_buffer_in_release() argument
251 /* Get memory region that specified device can perform DMA transfers to, using devicetree */ in dmm_buffer_in_release()
252 dh = dmm_heap_find(region); in dmm_buffer_in_release()
254 return -EINVAL; in dmm_buffer_in_release()
257 /* Check if device memory region is cacheable in dmm_buffer_in_release()
261 if (is_region_cacheable(dh->region)) { in dmm_buffer_in_release()
266 /* Check if user buffer and allocated buffer point to the same memory location in dmm_buffer_in_release()
274 /* Check if input buffer is contained within memory area in dmm_buffer_in_release()
275 * managed by dynamic memory allocator in dmm_buffer_in_release()
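The RX direction is symmetric: dmm_buffer_in_prepare() hands the driver a DMA-safe destination, and dmm_buffer_in_release() copies the received data back into the user buffer when the two are not the same memory, then frees the bounce buffer. A hedged sketch under the same assumptions and includes as the TX example (start_rx_dma() is hypothetical):

/* Placeholder for the peripheral-specific transfer (hypothetical). */
extern int start_rx_dma(void *buf, size_t len);

int receive_frame(void *mem_region, uint8_t *user_buf, size_t len)
{
	void *dma_buf;
	int err;

	err = dmm_buffer_in_prepare(mem_region, user_buf, len, &dma_buf);
	if (err < 0) {
		return err;
	}

	err = start_rx_dma(dma_buf, len);

	/* Copies from the bounce buffer back into user_buf if they differ,
	 * handles the cache for cacheable regions, and frees the bounce buffer.
	 */
	(void)dmm_buffer_in_release(mem_region, user_buf, len, dma_buf);

	return err;
}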
292 dh->region = &dmm_regions[idx]; in dmm_init()
293 sys_heap_init(&dh->heap, (void *)dmm_heap_start_get(dh), dmm_heap_size_get(dh)); in dmm_init()
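dmm_init() carves a sys_heap out of whatever each region has left after its statically reserved buffers: the heap starts at dt_allc rounded up to dt_align, and its size is the region size minus that start's offset from dt_addr. A worked example with made-up numbers:

/* Hypothetical numbers for one region:
 *   dt_addr  = 0x20000000  (region base)
 *   dt_size  = 0x1000      (4 KiB region)
 *   dt_allc  = 0x20000234  (end of statically reserved buffers)
 *   dt_align = 32          (data cache line size)
 *
 * heap start = ROUND_UP(0x20000234, 32)           = 0x20000240
 * heap size  = 0x1000 - (0x20000240 - 0x20000000) = 0xDC0 (3520 bytes)
 *
 * sys_heap_init() then manages exactly that remainder, so dynamically
 * allocated bounce buffers never overlap the statically reserved area.
 */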