Lines matching full:area in sound/core/memalloc.c (each hit shows its source line number and the enclosing function)
44 * snd_dma_alloc_dir_pages - allocate the buffer area according to the given
74 dmab->area = __snd_dma_alloc_pages(dmab, size); in snd_dma_alloc_dir_pages()
75 if (!dmab->area) in snd_dma_alloc_dir_pages()
83 * snd_dma_alloc_pages_fallback - allocate the buffer area according to the given type with fallback
110 if (!dmab->area) in snd_dma_alloc_pages_fallback()
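These two entry points are the allocator's public face: snd_dma_alloc_dir_pages() fills dmab->area (the CPU-visible mapping) and dmab->addr on success, while the fallback variant retries with progressively halved sizes when memory is tight. A minimal driver-side sketch, assuming the standard <sound/memalloc.h> API (the try_alloc_buffer() name is hypothetical):

#include <linux/dma-mapping.h>
#include <sound/memalloc.h>

/* Hypothetical helper: allocate a device-coherent playback buffer,
 * falling back to a smaller allocation when memory is fragmented. */
static int try_alloc_buffer(struct device *dev, size_t size,
                            struct snd_dma_buffer *dmab)
{
        int err;

        /* Try the exact size first; on success dmab->area holds the
         * kernel mapping and dmab->addr the device-visible address. */
        err = snd_dma_alloc_dir_pages(SNDRV_DMA_TYPE_DEV, dev,
                                      DMA_TO_DEVICE, size, dmab);
        if (!err)
                return 0;

        /* The fallback halves the size until something fits; check
         * dmab->bytes to see how much was actually obtained. */
        return snd_dma_alloc_pages_fallback(SNDRV_DMA_TYPE_DEV, dev,
                                            size, dmab);
}

Either way, the buffer is later released with snd_dma_free_pages(dmab).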
182 * @area: VM area information
187 struct vm_area_struct *area) in snd_dma_buffer_mmap() argument
195 return ops->mmap(dmab, area); in snd_dma_buffer_mmap()
253 return virt_to_page(dmab->area + offset); in snd_sgbuf_get_page()
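snd_dma_buffer_mmap() is the type-agnostic dispatcher: it hands the VMA to the ops->mmap implementation selected by dmab->dev.type, and snd_sgbuf_get_page() resolves a byte offset to its backing struct page. A sketch of a PCM .mmap callback delegating here (my_pcm_mmap is a hypothetical name; snd_pcm_get_dma_buf() is the usual accessor from <sound/pcm.h>):

#include <sound/pcm.h>
#include <sound/memalloc.h>

/* Hypothetical PCM mmap callback: let the allocator pick the right
 * per-type mmap strategy for whatever buffer type was allocated. */
static int my_pcm_mmap(struct snd_pcm_substream *substream,
                       struct vm_area_struct *area)
{
        return snd_dma_buffer_mmap(snd_pcm_get_dma_buf(substream), area);
}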
328 do_free_pages(dmab->area, dmab->bytes, false); in snd_dma_continuous_free()
332 struct vm_area_struct *area) in snd_dma_continuous_mmap() argument
334 return remap_pfn_range(area, area->vm_start, in snd_dma_continuous_mmap()
336 area->vm_end - area->vm_start, in snd_dma_continuous_mmap()
337 area->vm_page_prot); in snd_dma_continuous_mmap()
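These fragments belong to the SNDRV_DMA_TYPE_CONTINUOUS backend: plain contiguous kernel pages, freed through do_free_pages() and mapped into user space by page frame number. Reassembled from the lines above (the elided line supplies the PFN; exact code varies by kernel version), the mmap helper reads roughly:

static int snd_dma_continuous_mmap(struct snd_dma_buffer *dmab,
                                   struct vm_area_struct *area)
{
        /* The buffer is physically contiguous, so the whole VMA can
         * be remapped from a single starting page frame number. */
        return remap_pfn_range(area, area->vm_start,
                               dmab->addr >> PAGE_SHIFT,
                               area->vm_end - area->vm_start,
                               area->vm_page_prot);
}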
356 vfree(dmab->area); in snd_dma_vmalloc_free()
360 struct vm_area_struct *area) in snd_dma_vmalloc_mmap() argument
362 return remap_vmalloc_range(area, dmab->area, 0); in snd_dma_vmalloc_mmap()
366 page_to_phys(vmalloc_to_page((dmab)->area + (offset)))
377 return vmalloc_to_page(dmab->area + offset); in snd_dma_vmalloc_get_page()
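A SNDRV_DMA_TYPE_VMALLOC buffer is virtually contiguous but physically scattered, so user mappings go through remap_vmalloc_range() and every page lookup must go through vmalloc_to_page(), as the get-addr macro and get-page helper above both do. A small sketch of the same per-page translation (my_page_phys is a hypothetical helper name):

#include <linux/vmalloc.h>
#include <sound/memalloc.h>

/* Hypothetical helper: physical address backing one byte offset in a
 * vmalloc'ed buffer; each page must be resolved individually. */
static phys_addr_t my_page_phys(struct snd_dma_buffer *dmab, size_t offset)
{
        return page_to_phys(vmalloc_to_page(dmab->area + offset));
}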
444 if (pool && dmab->area) in snd_dma_iram_free()
445 gen_pool_free(pool, (unsigned long)dmab->area, dmab->bytes); in snd_dma_iram_free()
449 struct vm_area_struct *area) in snd_dma_iram_mmap() argument
451 area->vm_page_prot = pgprot_writecombine(area->vm_page_prot); in snd_dma_iram_mmap()
452 return remap_pfn_range(area, area->vm_start, in snd_dma_iram_mmap()
454 area->vm_end - area->vm_start, in snd_dma_iram_mmap()
455 area->vm_page_prot); in snd_dma_iram_mmap()
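The IRAM backend carves buffers out of a gen_pool over on-chip SRAM, and its mmap marks the mapping write-combined before remapping by PFN. The allocation side that pairs with the gen_pool_free() at line 445 looks roughly like this sketch (reconstructed; the pool handle is stashed in dmab->private_data so the free path can find it again):

#include <linux/genalloc.h>
#include <sound/memalloc.h>

static void *iram_alloc_sketch(struct snd_dma_buffer *dmab, size_t size)
{
        struct device *dev = dmab->dev.dev;
        struct gen_pool *pool;

        if (!dev->of_node)
                return NULL;
        /* Look up the "iram" SRAM pool from the device tree. */
        pool = of_gen_pool_get(dev->of_node, "iram", 0);
        if (!pool)
                return NULL;
        dmab->private_data = pool;
        /* Page-aligned allocation; fills dmab->addr with the DMA address. */
        return gen_pool_dma_alloc_align(pool, size, &dmab->addr, PAGE_SIZE);
}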
475 dma_free_coherent(dmab->dev.dev, dmab->bytes, dmab->area, dmab->addr); in snd_dma_dev_free()
479 struct vm_area_struct *area) in snd_dma_dev_mmap() argument
481 return dma_mmap_coherent(dmab->dev.dev, area, in snd_dma_dev_mmap()
482 dmab->area, dmab->addr, dmab->bytes); in snd_dma_dev_mmap()
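SNDRV_DMA_TYPE_DEV wraps the coherent DMA API, so free and mmap must hand back exactly the (size, cpu_addr, dma_addr) triple recorded at allocation time. A minimal standalone sketch of that contract (coherent_demo is a hypothetical name):

#include <linux/dma-mapping.h>

/* Hypothetical demo of the coherent-API triple that snd_dma_dev_free()
 * and snd_dma_dev_mmap() above pass back to the DMA core. */
static int coherent_demo(struct device *dev, size_t size)
{
        dma_addr_t addr;
        void *area = dma_alloc_coherent(dev, size, &addr, GFP_KERNEL);

        if (!area)
                return -ENOMEM;
        /* ... area is the CPU view, addr the device view ... */
        dma_free_coherent(dev, size, area, addr);
        return 0;
}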
503 do_free_pages(dmab->area, dmab->bytes, true); in snd_dma_wc_free()
507 struct vm_area_struct *area) in snd_dma_wc_mmap() argument
509 area->vm_page_prot = pgprot_writecombine(area->vm_page_prot); in snd_dma_wc_mmap()
510 return snd_dma_continuous_mmap(dmab, area); in snd_dma_wc_mmap()
520 dma_free_wc(dmab->dev.dev, dmab->bytes, dmab->area, dmab->addr); in snd_dma_wc_free()
524 struct vm_area_struct *area) in snd_dma_wc_mmap() argument
526 return dma_mmap_wc(dmab->dev.dev, area, in snd_dma_wc_mmap()
527 dmab->area, dmab->addr, dmab->bytes); in snd_dma_wc_mmap()
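snd_dma_wc_free() and snd_dma_wc_mmap() appear twice because the write-combined backend has two build variants: one allocates raw pages and applies pgprot_writecombine() itself before reusing the continuous mmap, the other defers to the DMA core's dma_*_wc() helpers. A sketch of the latter pairing (wc_demo is a hypothetical name):

#include <linux/dma-mapping.h>

/* Sketch of the dma_*_wc() trio used by the second variant above.
 * Write-combining batches CPU writes, which suits audio ring buffers
 * that the CPU mostly fills and rarely reads back. */
static int wc_demo(struct device *dev, size_t size)
{
        dma_addr_t addr;
        void *area = dma_alloc_wc(dev, size, &addr, GFP_KERNEL);

        if (!area)
                return -ENOMEM;
        dma_free_wc(dev, size, area, addr);
        return 0;
}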
574 dma_vunmap_noncontiguous(dmab->dev.dev, dmab->area); in snd_dma_noncontig_free()
580 struct vm_area_struct *area) in snd_dma_noncontig_mmap() argument
582 return dma_mmap_noncontiguous(dmab->dev.dev, area, in snd_dma_noncontig_mmap()
592 invalidate_kernel_vmap_range(dmab->area, dmab->bytes); in snd_dma_noncontig_sync()
598 flush_kernel_vmap_range(dmab->area, dmab->bytes); in snd_dma_noncontig_sync()
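The noncontiguous backend vmaps a scatter-gather allocation into dmab->area, so the cache lines over that vmap alias must be maintained by hand: invalidate before the CPU reads what the device wrote, flush after the CPU writes, which is exactly the pair of calls above. Drivers reach this via snd_dma_buffer_sync(); a hedged sketch, assuming the SNDRV_DMA_SYNC_* modes from <sound/memalloc.h>:

#include <sound/memalloc.h>

/* Hypothetical capture-path step: make the device's DMA writes
 * visible to the CPU before copying samples out of dmab->area. */
static void sync_for_cpu_demo(struct snd_dma_buffer *dmab)
{
        /* For non-coherent devices this lands in snd_dma_noncontig_sync(),
         * which invalidates the kernel vmap range shown above. */
        snd_dma_buffer_sync(dmab, SNDRV_DMA_SYNC_CPU);
}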
702 struct vm_area_struct *area) in snd_dma_sg_wc_mmap() argument
704 area->vm_page_prot = pgprot_writecombine(area->vm_page_prot); in snd_dma_sg_wc_mmap()
705 return dma_mmap_noncontiguous(dmab->dev.dev, area, in snd_dma_sg_wc_mmap()
781 vunmap(dmab->area); in snd_dma_sg_fallback_free()
786 struct vm_area_struct *area) in snd_dma_sg_fallback_mmap() argument
791 area->vm_page_prot = pgprot_writecombine(area->vm_page_prot); in snd_dma_sg_fallback_mmap()
792 return vm_map_pages(area, sgbuf->pages, sgbuf->count); in snd_dma_sg_fallback_mmap()
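The SG fallback keeps an explicit page array (sgbuf->pages) precisely so its mmap can use vm_map_pages(), which inserts each page into the VMA individually instead of assuming physical contiguity. A sketch of that call pattern over a plain page array (map_pages_demo is hypothetical):

#include <linux/mm.h>

/* Hypothetical mmap body: insert 'num' individually allocated pages
 * into the user VMA, as the SG-fallback mmap above does. */
static int map_pages_demo(struct vm_area_struct *area,
                          struct page **pages, unsigned long num)
{
        /* vm_map_pages() verifies the VMA is large enough and honours
         * area->vm_pgoff for partial mappings. */
        return vm_map_pages(area, pages, num);
}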
822 dma_free_noncoherent(dmab->dev.dev, dmab->bytes, dmab->area, in snd_dma_noncoherent_free()
827 struct vm_area_struct *area) in snd_dma_noncoherent_mmap() argument
829 area->vm_page_prot = vm_get_page_prot(area->vm_flags); in snd_dma_noncoherent_mmap()
830 return dma_mmap_pages(dmab->dev.dev, area, in snd_dma_noncoherent_mmap()
831 area->vm_end - area->vm_start, in snd_dma_noncoherent_mmap()
832 virt_to_page(dmab->area)); in snd_dma_noncoherent_mmap()
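Finally, the noncoherent backend builds on dma_alloc_noncoherent(), which also needs the DMA direction at free time and leaves cache maintenance to the caller; its mmap keeps the normal page protection rather than forcing an uncached one. A sketch of the alloc/free pairing, assuming the <linux/dma-mapping.h> API:

#include <linux/dma-mapping.h>

/* Sketch: a noncoherent allocation must remember size, CPU address,
 * DMA address *and* direction for the matching free. */
static int noncoherent_demo(struct device *dev, size_t size)
{
        dma_addr_t addr;
        void *area = dma_alloc_noncoherent(dev, size, &addr,
                                           DMA_BIDIRECTIONAL, GFP_KERNEL);

        if (!area)
                return -ENOMEM;
        dma_free_noncoherent(dev, size, area, addr, DMA_BIDIRECTIONAL);
        return 0;
}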