Lines Matching full:buffer
15 void init_heap_helper_buffer(struct heap_helper_buffer *buffer, in init_heap_helper_buffer() argument
18 buffer->priv_virt = NULL; in init_heap_helper_buffer()
19 mutex_init(&buffer->lock); in init_heap_helper_buffer()
20 buffer->vmap_cnt = 0; in init_heap_helper_buffer()
21 buffer->vaddr = NULL; in init_heap_helper_buffer()
22 buffer->pagecount = 0; in init_heap_helper_buffer()
23 buffer->pages = NULL; in init_heap_helper_buffer()
24 INIT_LIST_HEAD(&buffer->attachments); in init_heap_helper_buffer()
25 buffer->free = free; in init_heap_helper_buffer()
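
These matches appear to be the kernel's dma-buf heap helpers (heap-helpers.c). The group above covers the whole body of init_heap_helper_buffer(), so it can be reconstructed almost verbatim; only the type of the free-callback parameter is inferred from the buffer->free assignment:

void init_heap_helper_buffer(struct heap_helper_buffer *buffer,
                             void (*free)(struct heap_helper_buffer *buffer))
{
        buffer->priv_virt = NULL;
        mutex_init(&buffer->lock);
        buffer->vmap_cnt = 0;
        buffer->vaddr = NULL;
        buffer->pagecount = 0;
        buffer->pages = NULL;
        INIT_LIST_HEAD(&buffer->attachments);
        buffer->free = free;    /* heap-specific destructor, invoked on final release */
}
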
28 struct dma_buf *heap_helper_export_dmabuf(struct heap_helper_buffer *buffer, in heap_helper_export_dmabuf() argument
34 exp_info.size = buffer->size; in heap_helper_export_dmabuf()
36 exp_info.priv = buffer; in heap_helper_export_dmabuf()
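
Only three lines of heap_helper_export_dmabuf() match, but they imply the standard DEFINE_DMA_BUF_EXPORT_INFO()/dma_buf_export() pattern. A sketch under that assumption; the heap_helper_ops name and the fd_flags parameter are guesses:

struct dma_buf *heap_helper_export_dmabuf(struct heap_helper_buffer *buffer,
                                          int fd_flags)
{
        DEFINE_DMA_BUF_EXPORT_INFO(exp_info);

        exp_info.ops = &heap_helper_ops;    /* assumed dma_buf_ops table */
        exp_info.size = buffer->size;
        exp_info.flags = fd_flags;          /* assumed: open flags from the allocate ioctl */
        exp_info.priv = buffer;             /* lets every op below recover the buffer */

        return dma_buf_export(&exp_info);
}
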
41 static void *dma_heap_map_kernel(struct heap_helper_buffer *buffer) in dma_heap_map_kernel() argument
45 vaddr = vmap(buffer->pages, buffer->pagecount, VM_MAP, PAGE_KERNEL); in dma_heap_map_kernel()
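
dma_heap_map_kernel() is a thin wrapper around vmap(): it stitches the buffer's page array into one contiguous kernel virtual range. A sketch; the ERR_PTR(-ENOMEM) failure path is an assumption based on the IS_ERR-style caller in dma_heap_buffer_vmap_get():

static void *dma_heap_map_kernel(struct heap_helper_buffer *buffer)
{
        void *vaddr;

        vaddr = vmap(buffer->pages, buffer->pagecount, VM_MAP, PAGE_KERNEL);
        if (!vaddr)
                return ERR_PTR(-ENOMEM);    /* assumed error convention */

        return vaddr;
}
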
52 static void dma_heap_buffer_destroy(struct heap_helper_buffer *buffer) in dma_heap_buffer_destroy() argument
54 if (buffer->vmap_cnt > 0) { in dma_heap_buffer_destroy()
55 WARN(1, "%s: buffer still mapped in the kernel\n", __func__); in dma_heap_buffer_destroy()
56 vunmap(buffer->vaddr); in dma_heap_buffer_destroy()
59 buffer->free(buffer); in dma_heap_buffer_destroy()
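
dma_heap_buffer_destroy() warns if a kernel mapping is still live (a refcounting bug in the caller), reclaims it anyway, and then delegates the actual page freeing to the heap-specific callback stored at init time:

static void dma_heap_buffer_destroy(struct heap_helper_buffer *buffer)
{
        if (buffer->vmap_cnt > 0) {
                /* Someone forgot a vunmap; reclaim the mapping anyway. */
                WARN(1, "%s: buffer still mapped in the kernel\n", __func__);
                vunmap(buffer->vaddr);
        }

        buffer->free(buffer);    /* hand the pages back to the owning heap */
}
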
62 static void *dma_heap_buffer_vmap_get(struct heap_helper_buffer *buffer) in dma_heap_buffer_vmap_get() argument
66 if (buffer->vmap_cnt) { in dma_heap_buffer_vmap_get()
67 buffer->vmap_cnt++; in dma_heap_buffer_vmap_get()
68 return buffer->vaddr; in dma_heap_buffer_vmap_get()
70 vaddr = dma_heap_map_kernel(buffer); in dma_heap_buffer_vmap_get()
73 buffer->vaddr = vaddr; in dma_heap_buffer_vmap_get()
74 buffer->vmap_cnt++; in dma_heap_buffer_vmap_get()
78 static void dma_heap_buffer_vmap_put(struct heap_helper_buffer *buffer) in dma_heap_buffer_vmap_put() argument
80 if (!--buffer->vmap_cnt) { in dma_heap_buffer_vmap_put()
81 vunmap(buffer->vaddr); in dma_heap_buffer_vmap_put()
82 buffer->vaddr = NULL; in dma_heap_buffer_vmap_put()
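
dma_heap_buffer_vmap_get()/_put() refcount a single shared kernel mapping; both rely on the caller holding buffer->lock (see the dma_buf vmap/vunmap ops near the end of this listing). The IS_ERR check on the map helper's return is an assumption:

static void *dma_heap_buffer_vmap_get(struct heap_helper_buffer *buffer)
{
        void *vaddr;

        if (buffer->vmap_cnt) {        /* already mapped: just take a reference */
                buffer->vmap_cnt++;
                return buffer->vaddr;
        }
        vaddr = dma_heap_map_kernel(buffer);
        if (IS_ERR(vaddr))             /* assumed error propagation */
                return vaddr;
        buffer->vaddr = vaddr;
        buffer->vmap_cnt++;
        return vaddr;
}

static void dma_heap_buffer_vmap_put(struct heap_helper_buffer *buffer)
{
        if (!--buffer->vmap_cnt) {     /* last reference: tear the mapping down */
                vunmap(buffer->vaddr);
                buffer->vaddr = NULL;
        }
}
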
96 struct heap_helper_buffer *buffer = dmabuf->priv; in dma_heap_attach() local
103 ret = sg_alloc_table_from_pages(&a->table, buffer->pages, in dma_heap_attach()
104 buffer->pagecount, 0, in dma_heap_attach()
105 buffer->pagecount << PAGE_SHIFT, in dma_heap_attach()
117 mutex_lock(&buffer->lock); in dma_heap_attach()
118 list_add(&a->list, &buffer->attachments); in dma_heap_attach()
119 mutex_unlock(&buffer->lock); in dma_heap_attach()
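
dma_heap_attach() builds a per-attachment scatterlist over the backing pages and links it into buffer->attachments so the CPU-access ops can sync every attached device. The bookkeeping struct (called dma_heaps_attachment here) and the error unwinding are assumptions:

struct dma_heaps_attachment {        /* assumed per-attachment state */
        struct device *dev;
        struct sg_table table;
        struct list_head list;
};

static int dma_heap_attach(struct dma_buf *dmabuf,
                           struct dma_buf_attachment *attachment)
{
        struct dma_heaps_attachment *a;
        struct heap_helper_buffer *buffer = dmabuf->priv;
        int ret;

        a = kzalloc(sizeof(*a), GFP_KERNEL);
        if (!a)
                return -ENOMEM;

        /* One scatterlist covering all pagecount backing pages. */
        ret = sg_alloc_table_from_pages(&a->table, buffer->pages,
                                        buffer->pagecount, 0,
                                        buffer->pagecount << PAGE_SHIFT,
                                        GFP_KERNEL);
        if (ret) {
                kfree(a);
                return ret;
        }

        a->dev = attachment->dev;
        INIT_LIST_HEAD(&a->list);
        attachment->priv = a;

        mutex_lock(&buffer->lock);
        list_add(&a->list, &buffer->attachments);
        mutex_unlock(&buffer->lock);

        return 0;
}
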
128 struct heap_helper_buffer *buffer = dmabuf->priv; in dma_heap_detach() local
130 mutex_lock(&buffer->lock); in dma_heap_detach()
132 mutex_unlock(&buffer->lock); in dma_heap_detach()
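
dma_heap_detach() is the inverse. The search only matched the lock/unlock lines, so the list_del() between them and the sg_table/allocation teardown are inferred:

static void dma_heap_detach(struct dma_buf *dmabuf,
                            struct dma_buf_attachment *attachment)
{
        struct dma_heaps_attachment *a = attachment->priv;
        struct heap_helper_buffer *buffer = dmabuf->priv;

        mutex_lock(&buffer->lock);
        list_del(&a->list);        /* inferred: the line between the matched locks */
        mutex_unlock(&buffer->lock);

        sg_free_table(&a->table);
        kfree(a);
}
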
162 struct heap_helper_buffer *buffer = vma->vm_private_data; in dma_heap_vm_fault() local
164 if (vmf->pgoff >= buffer->pagecount) in dma_heap_vm_fault()
167 vmf->page = buffer->pages[vmf->pgoff]; in dma_heap_vm_fault()
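
The fault handler serves pages straight out of buffer->pages. Note the bounds check: pgoff == pagecount already indexes one past the end of the array, so the comparison has to be >=. The vma dereference and get_page() are assumptions from the standard .fault contract:

static vm_fault_t dma_heap_vm_fault(struct vm_fault *vmf)
{
        struct vm_area_struct *vma = vmf->vma;
        struct heap_helper_buffer *buffer = vma->vm_private_data;

        /* pgoff == pagecount is already out of range, reject it too */
        if (vmf->pgoff >= buffer->pagecount)
                return VM_FAULT_SIGBUS;

        vmf->page = buffer->pages[vmf->pgoff];
        get_page(vmf->page);    /* the core mm drops this reference on unmap */

        return 0;
}
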
179 struct heap_helper_buffer *buffer = dmabuf->priv; in dma_heap_mmap() local
185 vma->vm_private_data = buffer; in dma_heap_mmap()
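
dma_heap_mmap() defers all population to the fault handler: it just stashes the buffer in vm_private_data and installs a vm_operations_struct. The ops-table name and the usual rejection of non-shared mappings are assumptions:

static const struct vm_operations_struct dma_heap_vm_ops = {
        .fault = dma_heap_vm_fault,
};

static int dma_heap_mmap(struct dma_buf *dmabuf, struct vm_area_struct *vma)
{
        struct heap_helper_buffer *buffer = dmabuf->priv;

        /* Assumed: private CoW mappings of a shared buffer make no sense. */
        if ((vma->vm_flags & (VM_SHARED | VM_MAYSHARE)) == 0)
                return -EINVAL;

        vma->vm_ops = &dma_heap_vm_ops;
        vma->vm_private_data = buffer;    /* recovered in the fault handler */

        return 0;
}
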
192 struct heap_helper_buffer *buffer = dmabuf->priv; in dma_heap_dma_buf_release() local
194 dma_heap_buffer_destroy(buffer); in dma_heap_dma_buf_release()
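
The release op is a one-liner that funnels into the destroy helper above once the last dma_buf reference is dropped:

static void dma_heap_dma_buf_release(struct dma_buf *dmabuf)
{
        struct heap_helper_buffer *buffer = dmabuf->priv;

        dma_heap_buffer_destroy(buffer);
}
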
200 struct heap_helper_buffer *buffer = dmabuf->priv; in dma_heap_dma_buf_begin_cpu_access() local
204 mutex_lock(&buffer->lock); in dma_heap_dma_buf_begin_cpu_access()
206 if (buffer->vmap_cnt) in dma_heap_dma_buf_begin_cpu_access()
207 invalidate_kernel_vmap_range(buffer->vaddr, buffer->size); in dma_heap_dma_buf_begin_cpu_access()
209 list_for_each_entry(a, &buffer->attachments, list) { in dma_heap_dma_buf_begin_cpu_access()
213 mutex_unlock(&buffer->lock); in dma_heap_dma_buf_begin_cpu_access()
221 struct heap_helper_buffer *buffer = dmabuf->priv; in dma_heap_dma_buf_end_cpu_access() local
224 mutex_lock(&buffer->lock); in dma_heap_dma_buf_end_cpu_access()
226 if (buffer->vmap_cnt) in dma_heap_dma_buf_end_cpu_access()
227 flush_kernel_vmap_range(buffer->vaddr, buffer->size); in dma_heap_dma_buf_end_cpu_access()
229 list_for_each_entry(a, &buffer->attachments, list) { in dma_heap_dma_buf_end_cpu_access()
233 mutex_unlock(&buffer->lock); in dma_heap_dma_buf_end_cpu_access()
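
begin_cpu_access/end_cpu_access are mirror images: both walk the attachment list under buffer->lock and additionally keep the optional kernel vmap cache-coherent. The dma_sync_sg_* calls inside the loops and the local declarations are assumptions based on the usual dma-buf pattern:

static int dma_heap_dma_buf_begin_cpu_access(struct dma_buf *dmabuf,
                                             enum dma_data_direction direction)
{
        struct heap_helper_buffer *buffer = dmabuf->priv;
        struct dma_heaps_attachment *a;

        mutex_lock(&buffer->lock);

        /* Make device writes visible through the kernel mapping, if any. */
        if (buffer->vmap_cnt)
                invalidate_kernel_vmap_range(buffer->vaddr, buffer->size);

        /* Sync every attached device's scatterlist for CPU access. */
        list_for_each_entry(a, &buffer->attachments, list)
                dma_sync_sg_for_cpu(a->dev, a->table.sgl, a->table.nents,
                                    direction);
        mutex_unlock(&buffer->lock);

        return 0;
}

static int dma_heap_dma_buf_end_cpu_access(struct dma_buf *dmabuf,
                                           enum dma_data_direction direction)
{
        struct heap_helper_buffer *buffer = dmabuf->priv;
        struct dma_heaps_attachment *a;

        mutex_lock(&buffer->lock);

        /* Flush CPU writes made through the kernel mapping. */
        if (buffer->vmap_cnt)
                flush_kernel_vmap_range(buffer->vaddr, buffer->size);

        /* Hand ownership back to the attached devices. */
        list_for_each_entry(a, &buffer->attachments, list)
                dma_sync_sg_for_device(a->dev, a->table.sgl, a->table.nents,
                                       direction);
        mutex_unlock(&buffer->lock);

        return 0;
}
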
240 struct heap_helper_buffer *buffer = dmabuf->priv; in dma_heap_dma_buf_vmap() local
243 mutex_lock(&buffer->lock); in dma_heap_dma_buf_vmap()
244 vaddr = dma_heap_buffer_vmap_get(buffer); in dma_heap_dma_buf_vmap()
245 mutex_unlock(&buffer->lock); in dma_heap_dma_buf_vmap()
252 struct heap_helper_buffer *buffer = dmabuf->priv; in dma_heap_dma_buf_vunmap() local
254 mutex_lock(&buffer->lock); in dma_heap_dma_buf_vunmap()
255 dma_heap_buffer_vmap_put(buffer); in dma_heap_dma_buf_vunmap()
256 mutex_unlock(&buffer->lock); in dma_heap_dma_buf_vunmap()
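
Finally, the dma_buf vmap/vunmap ops are just locked wrappers around the refcounted helpers above, which is where the "caller holds buffer->lock" requirement is satisfied. The signatures follow the older void *-returning vmap interface (before the dma_buf_map conversion), so treat them as an era-specific assumption:

static void *dma_heap_dma_buf_vmap(struct dma_buf *dmabuf)
{
        struct heap_helper_buffer *buffer = dmabuf->priv;
        void *vaddr;

        mutex_lock(&buffer->lock);    /* serializes against other vmap/vunmap callers */
        vaddr = dma_heap_buffer_vmap_get(buffer);
        mutex_unlock(&buffer->lock);

        return vaddr;
}

static void dma_heap_dma_buf_vunmap(struct dma_buf *dmabuf, void *vaddr)
{
        struct heap_helper_buffer *buffer = dmabuf->priv;

        mutex_lock(&buffer->lock);
        dma_heap_buffer_vmap_put(buffer);
        mutex_unlock(&buffer->lock);
}
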