Lines matching full:buffer in drivers/dma-buf/heaps/cma_heap.c
53 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_attach() local
61 ret = sg_alloc_table_from_pages(&a->table, buffer->pages, in cma_heap_attach()
62 buffer->pagecount, 0, in cma_heap_attach()
63 buffer->pagecount << PAGE_SHIFT, in cma_heap_attach()
76 mutex_lock(&buffer->lock); in cma_heap_attach()
77 list_add(&a->list, &buffer->attachments); in cma_heap_attach()
78 mutex_unlock(&buffer->lock); in cma_heap_attach()
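
For orientation, here is the buffer bookkeeping these matches reference, plus a sketch of the attach callback they come from. The struct layouts and the dma_heap_attachment handling outside the matched lines are reconstructed from the surrounding driver and may differ slightly by kernel revision:

struct cma_heap {
        struct dma_heap *heap;
        struct cma *cma;
};

struct cma_heap_buffer {
        struct cma_heap *heap;
        struct list_head attachments;
        struct mutex lock;
        unsigned long len;
        struct page *cma_pages;         /* first page of the CMA allocation */
        struct page **pages;            /* flat array, one entry per page */
        pgoff_t pagecount;
        int vmap_cnt;                   /* kernel vmap reference count */
        void *vaddr;
};

struct dma_heap_attachment {
        struct device *dev;
        struct sg_table table;
        struct list_head list;
        bool mapped;
};

static int cma_heap_attach(struct dma_buf *dmabuf,
                           struct dma_buf_attachment *attachment)
{
        struct cma_heap_buffer *buffer = dmabuf->priv;
        struct dma_heap_attachment *a;
        int ret;

        a = kzalloc(sizeof(*a), GFP_KERNEL);
        if (!a)
                return -ENOMEM;

        /* Build a scatterlist covering the whole buffer for this device */
        ret = sg_alloc_table_from_pages(&a->table, buffer->pages,
                                        buffer->pagecount, 0,
                                        buffer->pagecount << PAGE_SHIFT,
                                        GFP_KERNEL);
        if (ret) {
                kfree(a);
                return ret;
        }

        a->dev = attachment->dev;
        INIT_LIST_HEAD(&a->list);
        a->mapped = false;

        attachment->priv = a;

        /* buffer->lock serializes the attachment list */
        mutex_lock(&buffer->lock);
        list_add(&a->list, &buffer->attachments);
        mutex_unlock(&buffer->lock);

        return 0;
}

Each attachment gets its own sg_table, so per-device DMA mappings and cache maintenance can be tracked independently.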
86 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_detach() local
89 mutex_lock(&buffer->lock); in cma_heap_detach()
91 mutex_unlock(&buffer->lock); in cma_heap_detach()
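
The matching detach path (a sketch; the sg_free_table()/kfree() teardown outside the locked region is assumed from the usual pattern):

static void cma_heap_detach(struct dma_buf *dmabuf,
                            struct dma_buf_attachment *attachment)
{
        struct cma_heap_buffer *buffer = dmabuf->priv;
        struct dma_heap_attachment *a = attachment->priv;

        /* Unlink under the lock, then free outside it */
        mutex_lock(&buffer->lock);
        list_del(&a->list);
        mutex_unlock(&buffer->lock);

        sg_free_table(&a->table);
        kfree(a);
}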
124 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_dma_buf_begin_cpu_access() local
127 if (buffer->vmap_cnt) in cma_heap_dma_buf_begin_cpu_access()
128 invalidate_kernel_vmap_range(buffer->vaddr, buffer->len); in cma_heap_dma_buf_begin_cpu_access()
130 mutex_lock(&buffer->lock); in cma_heap_dma_buf_begin_cpu_access()
131 list_for_each_entry(a, &buffer->attachments, list) { in cma_heap_dma_buf_begin_cpu_access()
136 mutex_unlock(&buffer->lock); in cma_heap_dma_buf_begin_cpu_access()
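
A sketch of the begin-CPU-access callback around these matches; skipping unmapped attachments via a->mapped is an assumption carried over from the attach sketch above:

static int cma_heap_dma_buf_begin_cpu_access(struct dma_buf *dmabuf,
                                             enum dma_data_direction direction)
{
        struct cma_heap_buffer *buffer = dmabuf->priv;
        struct dma_heap_attachment *a;

        /* Drop any stale kernel-side cached view before the CPU reads */
        if (buffer->vmap_cnt)
                invalidate_kernel_vmap_range(buffer->vaddr, buffer->len);

        mutex_lock(&buffer->lock);
        list_for_each_entry(a, &buffer->attachments, list) {
                if (!a->mapped)
                        continue;
                dma_sync_sgtable_for_cpu(a->dev, &a->table, direction);
        }
        mutex_unlock(&buffer->lock);

        return 0;
}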
144 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_dma_buf_end_cpu_access() local
147 if (buffer->vmap_cnt) in cma_heap_dma_buf_end_cpu_access()
148 flush_kernel_vmap_range(buffer->vaddr, buffer->len); in cma_heap_dma_buf_end_cpu_access()
150 mutex_lock(&buffer->lock); in cma_heap_dma_buf_end_cpu_access()
151 list_for_each_entry(a, &buffer->attachments, list) { in cma_heap_dma_buf_end_cpu_access()
156 mutex_unlock(&buffer->lock); in cma_heap_dma_buf_end_cpu_access()
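
And its mirror image for handing the buffer back to the devices (same assumptions as above):

static int cma_heap_dma_buf_end_cpu_access(struct dma_buf *dmabuf,
                                           enum dma_data_direction direction)
{
        struct cma_heap_buffer *buffer = dmabuf->priv;
        struct dma_heap_attachment *a;

        /* Write back any kernel-side cached view before devices read */
        if (buffer->vmap_cnt)
                flush_kernel_vmap_range(buffer->vaddr, buffer->len);

        mutex_lock(&buffer->lock);
        list_for_each_entry(a, &buffer->attachments, list) {
                if (!a->mapped)
                        continue;
                dma_sync_sgtable_for_device(a->dev, &a->table, direction);
        }
        mutex_unlock(&buffer->lock);

        return 0;
}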
164 struct cma_heap_buffer *buffer = vma->vm_private_data; in cma_heap_vm_fault() local
166 if (vmf->pgoff >= buffer->pagecount) in cma_heap_vm_fault()
169 vmf->page = buffer->pages[vmf->pgoff]; in cma_heap_vm_fault()
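
A sketch of the fault handler. Note the bounds check must also reject vmf->pgoff == buffer->pagecount, since valid page offsets run from 0 to pagecount - 1:

static vm_fault_t cma_heap_vm_fault(struct vm_fault *vmf)
{
        struct vm_area_struct *vma = vmf->vma;
        struct cma_heap_buffer *buffer = vma->vm_private_data;

        /* Reject faults past the end of the buffer (valid: 0..pagecount-1) */
        if (vmf->pgoff >= buffer->pagecount)
                return VM_FAULT_SIGBUS;

        vmf->page = buffer->pages[vmf->pgoff];
        get_page(vmf->page);

        return 0;
}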
181 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_mmap() local
187 vma->vm_private_data = buffer; in cma_heap_mmap()
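
The mmap hook wires the VMA to the fault handler above (a sketch; the shared-mapping check and the vm_ops table are reconstructed):

static const struct vm_operations_struct dma_heap_vm_ops = {
        .fault = cma_heap_vm_fault,
};

static int cma_heap_mmap(struct dma_buf *dmabuf, struct vm_area_struct *vma)
{
        struct cma_heap_buffer *buffer = dmabuf->priv;

        /* dma-buf mappings must be shared; private COW copies make no sense */
        if ((vma->vm_flags & (VM_SHARED | VM_MAYSHARE)) == 0)
                return -EINVAL;

        vma->vm_ops = &dma_heap_vm_ops;
        vma->vm_private_data = buffer;

        return 0;
}

Pages are faulted in lazily rather than inserted up front, which keeps mmap() cheap for large buffers.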
192 static void *cma_heap_do_vmap(struct cma_heap_buffer *buffer) in cma_heap_do_vmap() argument
196 vaddr = vmap(buffer->pages, buffer->pagecount, VM_MAP, PAGE_KERNEL); in cma_heap_do_vmap()
205 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_vmap() local
209 mutex_lock(&buffer->lock); in cma_heap_vmap()
210 if (buffer->vmap_cnt) { in cma_heap_vmap()
211 buffer->vmap_cnt++; in cma_heap_vmap()
212 dma_buf_map_set_vaddr(map, buffer->vaddr); in cma_heap_vmap()
216 vaddr = cma_heap_do_vmap(buffer); in cma_heap_vmap()
221 buffer->vaddr = vaddr; in cma_heap_vmap()
222 buffer->vmap_cnt++; in cma_heap_vmap()
223 dma_buf_map_set_vaddr(map, buffer->vaddr); in cma_heap_vmap()
225 mutex_unlock(&buffer->lock); in cma_heap_vmap()
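
The two vmap matches above fit together as follows (a sketch; dma_buf_map was the kernel's vmap-descriptor type in this era, later renamed iosys_map):

static void *cma_heap_do_vmap(struct cma_heap_buffer *buffer)
{
        void *vaddr;

        vaddr = vmap(buffer->pages, buffer->pagecount, VM_MAP, PAGE_KERNEL);
        if (!vaddr)
                return ERR_PTR(-ENOMEM);

        return vaddr;
}

static int cma_heap_vmap(struct dma_buf *dmabuf, struct dma_buf_map *map)
{
        struct cma_heap_buffer *buffer = dmabuf->priv;
        void *vaddr;
        int ret = 0;

        mutex_lock(&buffer->lock);
        if (buffer->vmap_cnt) {
                /* Already mapped: just bump the refcount and reuse it */
                buffer->vmap_cnt++;
                dma_buf_map_set_vaddr(map, buffer->vaddr);
                goto out;
        }

        vaddr = cma_heap_do_vmap(buffer);
        if (IS_ERR(vaddr)) {
                ret = PTR_ERR(vaddr);
                goto out;
        }
        buffer->vaddr = vaddr;
        buffer->vmap_cnt++;
        dma_buf_map_set_vaddr(map, buffer->vaddr);
out:
        mutex_unlock(&buffer->lock);

        return ret;
}

Refcounting the mapping lets nested vmap/vunmap pairs share one kernel virtual range instead of thrashing vmalloc space.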
232 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_vunmap() local
234 mutex_lock(&buffer->lock); in cma_heap_vunmap()
235 if (!--buffer->vmap_cnt) { in cma_heap_vunmap()
236 vunmap(buffer->vaddr); in cma_heap_vunmap()
237 buffer->vaddr = NULL; in cma_heap_vunmap()
239 mutex_unlock(&buffer->lock); in cma_heap_vunmap()
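
The matching unmap drops the reference and tears the mapping down only when the last user is gone (the final dma_buf_map_clear() is assumed):

static void cma_heap_vunmap(struct dma_buf *dmabuf, struct dma_buf_map *map)
{
        struct cma_heap_buffer *buffer = dmabuf->priv;

        mutex_lock(&buffer->lock);
        if (!--buffer->vmap_cnt) {
                vunmap(buffer->vaddr);
                buffer->vaddr = NULL;
        }
        mutex_unlock(&buffer->lock);
        dma_buf_map_clear(map);
}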
245 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_dma_buf_release() local
246 struct cma_heap *cma_heap = buffer->heap; in cma_heap_dma_buf_release()
248 if (buffer->vmap_cnt > 0) { in cma_heap_dma_buf_release()
249 WARN(1, "%s: buffer still mapped in the kernel\n", __func__); in cma_heap_dma_buf_release()
250 vunmap(buffer->vaddr); in cma_heap_dma_buf_release()
251 buffer->vaddr = NULL; in cma_heap_dma_buf_release()
255 kfree(buffer->pages); in cma_heap_dma_buf_release()
257 cma_release(cma_heap->cma, buffer->cma_pages, buffer->pagecount); in cma_heap_dma_buf_release()
258 kfree(buffer); in cma_heap_dma_buf_release()
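
Release, reconstructed around the matches; it undoes cma_heap_allocate() below and warns if a kernel mapping leaked:

static void cma_heap_dma_buf_release(struct dma_buf *dmabuf)
{
        struct cma_heap_buffer *buffer = dmabuf->priv;
        struct cma_heap *cma_heap = buffer->heap;

        if (buffer->vmap_cnt > 0) {
                /* A leaked vmap reference; clean it up anyway */
                WARN(1, "%s: buffer still mapped in the kernel\n", __func__);
                vunmap(buffer->vaddr);
                buffer->vaddr = NULL;
        }

        /* free page list */
        kfree(buffer->pages);
        /* release memory */
        cma_release(cma_heap->cma, buffer->cma_pages, buffer->pagecount);
        kfree(buffer);
}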
280 struct cma_heap_buffer *buffer; in cma_heap_allocate() local
290 buffer = kzalloc(sizeof(*buffer), GFP_KERNEL); in cma_heap_allocate()
291 if (!buffer) in cma_heap_allocate()
294 INIT_LIST_HEAD(&buffer->attachments); in cma_heap_allocate()
295 mutex_init(&buffer->lock); in cma_heap_allocate()
296 buffer->len = size; in cma_heap_allocate()
328 buffer->pages = kmalloc_array(pagecount, sizeof(*buffer->pages), GFP_KERNEL); in cma_heap_allocate()
329 if (!buffer->pages) { in cma_heap_allocate()
335 buffer->pages[pg] = &cma_pages[pg]; in cma_heap_allocate()
337 buffer->cma_pages = cma_pages; in cma_heap_allocate()
338 buffer->heap = cma_heap; in cma_heap_allocate()
339 buffer->pagecount = pagecount; in cma_heap_allocate()
344 exp_info.size = buffer->len; in cma_heap_allocate()
346 exp_info.priv = buffer; in cma_heap_allocate()
355 kfree(buffer->pages); in cma_heap_allocate()
359 kfree(buffer); in cma_heap_allocate()
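
Finally, a sketch of the allocator that ties the matches together. The matched lines jump from 296 to 328, so the alignment clamp, cma_alloc() call, and page zeroing in between are reconstructed from the usual CMA-heap flow and may differ by revision; cma_heap_buf_ops is the assumed name of the dma_buf_ops table built from the callbacks above:

static struct dma_buf *cma_heap_allocate(struct dma_heap *heap,
                                         unsigned long len,
                                         unsigned long fd_flags,
                                         unsigned long heap_flags)
{
        struct cma_heap *cma_heap = dma_heap_get_drvdata(heap);
        struct cma_heap_buffer *buffer;
        DEFINE_DMA_BUF_EXPORT_INFO(exp_info);
        size_t size = PAGE_ALIGN(len);
        pgoff_t pagecount = size >> PAGE_SHIFT;
        unsigned long align = get_order(size);
        struct page *cma_pages;
        struct dma_buf *dmabuf;
        int ret = -ENOMEM;
        pgoff_t pg;

        buffer = kzalloc(sizeof(*buffer), GFP_KERNEL);
        if (!buffer)
                return ERR_PTR(-ENOMEM);

        INIT_LIST_HEAD(&buffer->attachments);
        mutex_init(&buffer->lock);
        buffer->len = size;

        if (align > CONFIG_CMA_ALIGNMENT)
                align = CONFIG_CMA_ALIGNMENT;

        cma_pages = cma_alloc(cma_heap->cma, pagecount, align, false);
        if (!cma_pages)
                goto free_buffer;

        /* ... zeroing of the freshly allocated pages elided ... */

        /* Flat page array so attach/mmap/vmap need no pfn arithmetic */
        buffer->pages = kmalloc_array(pagecount, sizeof(*buffer->pages),
                                      GFP_KERNEL);
        if (!buffer->pages) {
                ret = -ENOMEM;
                goto free_cma;
        }

        for (pg = 0; pg < pagecount; pg++)
                buffer->pages[pg] = &cma_pages[pg];

        buffer->cma_pages = cma_pages;
        buffer->heap = cma_heap;
        buffer->pagecount = pagecount;

        /* Export the buffer as a dma-buf fd for userspace */
        exp_info.exp_name = dma_heap_get_name(heap);
        exp_info.ops = &cma_heap_buf_ops;
        exp_info.size = buffer->len;
        exp_info.flags = fd_flags;
        exp_info.priv = buffer;
        dmabuf = dma_buf_export(&exp_info);
        if (IS_ERR(dmabuf)) {
                ret = PTR_ERR(dmabuf);
                goto free_pages;
        }
        return dmabuf;

free_pages:
        kfree(buffer->pages);
free_cma:
        cma_release(cma_heap->cma, cma_pages, pagecount);
free_buffer:
        kfree(buffer);

        return ERR_PTR(ret);
}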