Lines Matching +full:dma +full:- +full:mem

1 // SPDX-License-Identifier: GPL-2.0-only
3 * helper functions for SG DMA video4linux capture buffers
12 * Highly based on video-buf written originally by:
26 #include <linux/dma-mapping.h>
32 #include <media/videobuf-dma-sg.h>
47 MODULE_DESCRIPTION("helper module to manage video4linux dma sg buffers");
53 printk(KERN_DEBUG "vbuf-sg: " fmt , ## arg)
55 /* --------------------------------------------------------------------- */
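The helpers that follow all operate on a single descriptor, struct videobuf_dmabuf. Its layout can be inferred from the field accesses matched below; the authoritative definition is in include/media/videobuf-dma-sg.h, but roughly it looks like this (a sketch reconstructed from the listing, not copied from the header):

struct videobuf_dmabuf {
	u32			magic;		/* MAGIC_DMABUF */

	/* for userland (USERPTR) buffers */
	unsigned int		offset;		/* data offset into the first page */
	size_t			size;
	struct page		**pages;	/* pinned with pin_user_pages() */

	/* for kernel bounce buffers */
	void			*vaddr;		/* vmap() of the coherent pages */
	struct page		**vaddr_pages;
	dma_addr_t		*dma_addr;
	struct device		*dev;

	/* for overlay buffers (pci-pci dma) */
	dma_addr_t		bus_addr;

	/* common */
	struct scatterlist	*sglist;
	int			sglen;
	unsigned long		nr_pages;
	int			direction;	/* enum dma_data_direction value */
};
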
58 * Return a scatterlist for some page-aligned vmalloc()'ed memory
106 /* DMA to highmem pages might not work */
109 min_t(size_t, PAGE_SIZE - offset, size), offset);
110 size -= min_t(size_t, PAGE_SIZE - offset, size);
117 size -= min_t(size_t, PAGE_SIZE, size);
122 dprintk(2, "sgl: oops - no page\n");
127 dprintk(2, "sgl: oops - highmem page\n");
132 /* --------------------------------------------------------------------- */
136 struct videobuf_dma_sg_memory *mem = buf->priv;
137 BUG_ON(!mem);
139 MAGIC_CHECK(mem->magic, MAGIC_SG_MEM);
141 return &mem->dma;
145 static void videobuf_dma_init(struct videobuf_dmabuf *dma)
147 memset(dma, 0, sizeof(*dma));
148 dma->magic = MAGIC_DMABUF;
151 static int videobuf_dma_init_user_locked(struct videobuf_dmabuf *dma,
158 dma->direction = direction;
159 switch (dma->direction) {
171 last = ((data+size-1) & PAGE_MASK) >> PAGE_SHIFT;
172 dma->offset = data & ~PAGE_MASK;
173 dma->size = size;
174 dma->nr_pages = last-first+1;
175 dma->pages = kmalloc_array(dma->nr_pages, sizeof(struct page *),
177 if (NULL == dma->pages)
178 return -ENOMEM;
184 data, size, dma->nr_pages);
186 err = pin_user_pages(data & PAGE_MASK, dma->nr_pages,
187 flags | FOLL_LONGTERM, dma->pages, NULL);
189 if (err != dma->nr_pages) {
190 dma->nr_pages = (err >= 0) ? err : 0;
192 dma->nr_pages);
193 return err < 0 ? err : -EINVAL;
198 static int videobuf_dma_init_user(struct videobuf_dmabuf *dma, int direction,
203 mmap_read_lock(current->mm);
204 ret = videobuf_dma_init_user_locked(dma, direction, data, size);
205 mmap_read_unlock(current->mm);
210 static int videobuf_dma_init_kernel(struct videobuf_dmabuf *dma, int direction,
217 dma->direction = direction;
218 dma->vaddr_pages = kcalloc(nr_pages, sizeof(*dma->vaddr_pages),
220 if (!dma->vaddr_pages)
221 return -ENOMEM;
223 dma->dma_addr = kcalloc(nr_pages, sizeof(*dma->dma_addr), GFP_KERNEL);
224 if (!dma->dma_addr) {
225 kfree(dma->vaddr_pages);
226 return -ENOMEM;
231 addr = dma_alloc_coherent(dma->dev, PAGE_SIZE,
232 &(dma->dma_addr[i]), GFP_KERNEL);
236 dma->vaddr_pages[i] = virt_to_page(addr);
238 dma->vaddr = vmap(dma->vaddr_pages, nr_pages, VM_MAP | VM_IOREMAP,
240 if (NULL == dma->vaddr) {
246 dma->vaddr, nr_pages << PAGE_SHIFT);
248 memset(dma->vaddr, 0, nr_pages << PAGE_SHIFT);
249 dma->nr_pages = nr_pages;
256 i--;
257 addr = page_address(dma->vaddr_pages[i]);
258 dma_free_coherent(dma->dev, PAGE_SIZE, addr, dma->dma_addr[i]);
260 kfree(dma->dma_addr);
261 dma->dma_addr = NULL;
262 kfree(dma->vaddr_pages);
263 dma->vaddr_pages = NULL;
265 return -ENOMEM;
269 static int videobuf_dma_init_overlay(struct videobuf_dmabuf *dma, int direction,
274 dma->direction = direction;
277 return -EINVAL;
279 dma->bus_addr = addr;
280 dma->nr_pages = nr_pages;
285 static int videobuf_dma_map(struct device *dev, struct videobuf_dmabuf *dma)
287 MAGIC_CHECK(dma->magic, MAGIC_DMABUF);
288 BUG_ON(0 == dma->nr_pages);
290 if (dma->pages) {
291 dma->sglist = videobuf_pages_to_sg(dma->pages, dma->nr_pages,
292 dma->offset, dma->size);
294 if (dma->vaddr) {
295 dma->sglist = videobuf_vmalloc_to_sg(dma->vaddr,
296 dma->nr_pages);
298 if (dma->bus_addr) {
299 dma->sglist = vmalloc(sizeof(*dma->sglist));
300 if (NULL != dma->sglist) {
301 dma->sglen = 1;
302 sg_dma_address(&dma->sglist[0]) = dma->bus_addr
304 dma->sglist[0].offset = dma->bus_addr & ~PAGE_MASK;
305 sg_dma_len(&dma->sglist[0]) = dma->nr_pages * PAGE_SIZE;
308 if (NULL == dma->sglist) {
310 return -ENOMEM;
312 if (!dma->bus_addr) {
313 dma->sglen = dma_map_sg(dev, dma->sglist,
314 dma->nr_pages, dma->direction);
315 if (0 == dma->sglen) {
318 vfree(dma->sglist);
319 dma->sglist = NULL;
320 dma->sglen = 0;
321 return -ENOMEM;
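Once videobuf_dma_map() has run, dma->sglist and dma->sglen describe the buffer as seen by the device, and the driver walks that list to program its scatter-gather engine. A hedged sketch, where hw and program_descriptor() stand in for whatever the hardware actually needs:

struct scatterlist *sg;
int i;

/* dma is the struct videobuf_dmabuf mapped above */
for_each_sg(dma->sglist, sg, dma->sglen, i) {
	dma_addr_t addr = sg_dma_address(sg);
	unsigned int len = sg_dma_len(sg);

	program_descriptor(hw, addr, len);	/* hypothetical driver helper */
}
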
328 int videobuf_dma_unmap(struct device *dev, struct videobuf_dmabuf *dma)
330 MAGIC_CHECK(dma->magic, MAGIC_DMABUF);
332 if (!dma->sglen)
335 dma_unmap_sg(dev, dma->sglist, dma->nr_pages, dma->direction);
337 vfree(dma->sglist);
338 dma->sglist = NULL;
339 dma->sglen = 0;
345 int videobuf_dma_free(struct videobuf_dmabuf *dma)
348 MAGIC_CHECK(dma->magic, MAGIC_DMABUF);
349 BUG_ON(dma->sglen);
351 if (dma->pages) {
352 unpin_user_pages_dirty_lock(dma->pages, dma->nr_pages,
353 dma->direction == DMA_FROM_DEVICE);
354 kfree(dma->pages);
355 dma->pages = NULL;
358 if (dma->dma_addr) {
359 for (i = 0; i < dma->nr_pages; i++) {
362 addr = page_address(dma->vaddr_pages[i]);
363 dma_free_coherent(dma->dev, PAGE_SIZE, addr,
364 dma->dma_addr[i]);
366 kfree(dma->dma_addr);
367 dma->dma_addr = NULL;
368 kfree(dma->vaddr_pages);
369 dma->vaddr_pages = NULL;
370 vunmap(dma->vaddr);
371 dma->vaddr = NULL;
374 if (dma->bus_addr)
375 dma->bus_addr = 0;
376 dma->direction = DMA_NONE;
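Taken together, init, map, unmap and free form the lifecycle that the allocation and iolock paths further down drive for every buffer; the *_init helpers are static to this file, while videobuf_dma_unmap() and videobuf_dma_free() are non-static so teardown can also be done from driver code. In outline, for a USERPTR buffer:

int err;

videobuf_dma_init(&mem->dma);				/* zero, set MAGIC_DMABUF */
err = videobuf_dma_init_user(&mem->dma, DMA_FROM_DEVICE,
			     vb->baddr, vb->bsize);	/* pin the user pages */
if (!err)
	err = videobuf_dma_map(q->dev, &mem->dma);	/* build sglist, dma_map_sg() */

/* ... capture DMA runs; __videobuf_sync() is used before CPU access ... */

videobuf_dma_unmap(q->dev, &mem->dma);			/* dma_unmap_sg(), free sglist */
videobuf_dma_free(&mem->dma);				/* unpin pages, direction = DMA_NONE */
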
382 /* --------------------------------------------------------------------- */
386 struct videobuf_mapping *map = vma->vm_private_data;
388 dprintk(2, "vm_open %p [count=%d,vma=%08lx-%08lx]\n", map,
389 map->count, vma->vm_start, vma->vm_end);
391 map->count++;
396 struct videobuf_mapping *map = vma->vm_private_data;
397 struct videobuf_queue *q = map->q;
398 struct videobuf_dma_sg_memory *mem;
401 dprintk(2, "vm_close %p [count=%d,vma=%08lx-%08lx]\n", map,
402 map->count, vma->vm_start, vma->vm_end);
404 map->count--;
405 if (0 == map->count) {
409 if (NULL == q->bufs[i])
411 mem = q->bufs[i]->priv;
412 if (!mem)
415 MAGIC_CHECK(mem->magic, MAGIC_SG_MEM);
417 if (q->bufs[i]->map != map)
419 q->bufs[i]->map = NULL;
420 q->bufs[i]->baddr = 0;
421 q->ops->buf_release(q, q->bufs[i]);
430 * Get an anonymous page for the mapping. Make sure we can DMA to that
437 struct vm_area_struct *vma = vmf->vma;
440 dprintk(3, "fault: fault @ %08lx [vma %08lx-%08lx]\n",
441 vmf->address, vma->vm_start, vma->vm_end);
446 clear_user_highpage(page, vmf->address);
447 vmf->page = page;
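These three handlers are tied into the mapping through a vm_operations_struct defined right after the fault handler (it does not match this search); the fault handler above hands out freshly cleared anonymous pages so the mmap'ed range is always backed. The wiring is, give or take naming:

static const struct vm_operations_struct videobuf_vm_ops = {
	.open	= videobuf_vm_open,
	.close	= videobuf_vm_close,
	.fault	= videobuf_vm_fault,
};
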
458 /* ---------------------------------------------------------------------
470 struct videobuf_dma_sg_memory *mem;
473 vb = kzalloc(size + sizeof(*mem), GFP_KERNEL);
477 mem = vb->priv = ((char *)vb) + size;
478 mem->magic = MAGIC_SG_MEM;
480 videobuf_dma_init(&mem->dma);
483 __func__, vb, (long)sizeof(*vb), (long)size - sizeof(*vb),
484 mem, (long)sizeof(*mem));
491 struct videobuf_dma_sg_memory *mem = buf->priv;
492 BUG_ON(!mem);
494 MAGIC_CHECK(mem->magic, MAGIC_SG_MEM);
496 return mem->dma.vaddr;
503 struct videobuf_dma_sg_memory *mem = vb->priv;
508 BUG_ON(!mem);
510 MAGIC_CHECK(mem->magic, MAGIC_SG_MEM);
512 if (!mem->dma.dev)
513 mem->dma.dev = q->dev;
515 WARN_ON(mem->dma.dev != q->dev);
517 switch (vb->memory) {
520 if (0 == vb->baddr) {
521 /* no userspace addr -- kernel bounce buffer */
522 pages = PAGE_ALIGN(vb->size) >> PAGE_SHIFT;
523 err = videobuf_dma_init_kernel(&mem->dma,
528 } else if (vb->memory == V4L2_MEMORY_USERPTR) {
529 /* dma directly to userspace */
530 err = videobuf_dma_init_user(&mem->dma,
532 vb->baddr, vb->bsize);
538 we take current->mm->mmap_lock there, to prevent
541 err = videobuf_dma_init_user_locked(&mem->dma,
543 vb->baddr, vb->bsize);
550 return -EINVAL;
551 /* FIXME: need sanity checks for vb->boff */
557 bus = (dma_addr_t)(unsigned long)fbuf->base + vb->boff;
558 pages = PAGE_ALIGN(vb->size) >> PAGE_SHIFT;
559 err = videobuf_dma_init_overlay(&mem->dma, DMA_FROM_DEVICE,
567 err = videobuf_dma_map(q->dev, &mem->dma);
577 struct videobuf_dma_sg_memory *mem = buf->priv;
578 BUG_ON(!mem || !mem->dma.sglen);
580 MAGIC_CHECK(mem->magic, MAGIC_SG_MEM);
581 MAGIC_CHECK(mem->dma.magic, MAGIC_DMABUF);
583 dma_sync_sg_for_cpu(q->dev, mem->dma.sglist,
584 mem->dma.nr_pages, mem->dma.direction);
593 struct videobuf_dma_sg_memory *mem = buf->priv;
598 retval = -EINVAL;
600 BUG_ON(!mem);
601 MAGIC_CHECK(mem->magic, MAGIC_SG_MEM);
605 if (buf == q->bufs[first]) {
606 size = PAGE_ALIGN(q->bufs[first]->bsize);
614 (vma->vm_pgoff << PAGE_SHIFT));
621 retval = -ENOMEM;
628 if (NULL == q->bufs[i])
630 q->bufs[i]->map = map;
631 q->bufs[i]->baddr = vma->vm_start + size;
632 size += PAGE_ALIGN(q->bufs[i]->bsize);
635 map->count = 1;
636 map->q = q;
637 vma->vm_ops = &videobuf_vm_ops;
638 vma->vm_flags |= VM_DONTEXPAND | VM_DONTDUMP;
639 vma->vm_flags &= ~VM_IO;	/* using shared anonymous pages */
640 vma->vm_private_data = map;
641 dprintk(1, "mmap %p: q=%p %08lx-%08lx pgoff %08lx bufs %d-%d\n",
642 map, q, vma->vm_start, vma->vm_end, vma->vm_pgoff, first, last);
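For completeness: a capture driver selects this backend by initialising its queue with videobuf_queue_sg_init() and then reaches the scatter-gather state from its buf_prepare()/buf_queue() callbacks via videobuf_to_dma(). A hedged sketch, assuming the prototype from include/media/videobuf-dma-sg.h and using placeholder driver names (mydrv_*, fh, dev, pdev):

/* queue setup, e.g. at open() time */
videobuf_queue_sg_init(&fh->vb_q, &mydrv_video_qops,
		       &pdev->dev, &dev->slock,
		       V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_FIELD_INTERLACED,
		       sizeof(struct mydrv_buffer), fh, NULL);

/* inside buf_prepare()/buf_queue() */
struct videobuf_dmabuf *dma = videobuf_to_dma(vb);

mydrv_program_sg(dev, dma->sglist, dma->sglen);		/* hypothetical */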