Lines matching refs:hwdev: references to the struct device *hwdev argument of the xen-swiotlb DMA ops (drivers/xen/swiotlb-xen.c). An illustrative sketch of each function follows its group of matches below.
276 xen_swiotlb_alloc_coherent(struct device *hwdev, size_t size, in xen_swiotlb_alloc_coherent() argument
302 ret = xen_alloc_coherent_pages(hwdev, size, dma_handle, flags, attrs); in xen_swiotlb_alloc_coherent()
307 if (hwdev && hwdev->coherent_dma_mask) in xen_swiotlb_alloc_coherent()
308 dma_mask = hwdev->coherent_dma_mask; in xen_swiotlb_alloc_coherent()
322 xen_free_coherent_pages(hwdev, size, ret, (dma_addr_t)phys, attrs); in xen_swiotlb_alloc_coherent()
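The matches above cover the coherent allocation path: the pages come from xen_alloc_coherent_pages() (line 302), the device's coherent_dma_mask is honoured with a fallback when hwdev is NULL or the mask is unset (lines 307-308), and the error path returns the pages through xen_free_coherent_pages() (line 322). A minimal sketch of how those pieces fit together; the 32-bit fallback mask, the virt_to_phys() lookup, and the simplified reachability check are assumptions, and the listing does not show the rest of the function (such as making the buffer machine-contiguous for Xen):

static void *
xen_swiotlb_alloc_coherent(struct device *hwdev, size_t size,
                           dma_addr_t *dma_handle, gfp_t flags,
                           unsigned long attrs)
{
        u64 dma_mask = DMA_BIT_MASK(32);        /* assumed fallback mask */
        phys_addr_t phys;
        void *ret;

        /* Line 302: delegate the page allocation. */
        ret = xen_alloc_coherent_pages(hwdev, size, dma_handle, flags, attrs);
        if (!ret)
                return ret;

        /* Lines 307-308: prefer the device's own coherent DMA mask. */
        if (hwdev && hwdev->coherent_dma_mask)
                dma_mask = hwdev->coherent_dma_mask;

        /* Assumed: simplified check that the buffer is reachable under the mask. */
        phys = virt_to_phys(ret);
        if ((u64)phys + size - 1 > dma_mask) {
                /* Line 322: back out the allocation on failure. */
                xen_free_coherent_pages(hwdev, size, ret,
                                        (dma_addr_t)phys, attrs);
                return NULL;
        }

        return ret;
}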
332 xen_swiotlb_free_coherent(struct device *hwdev, size_t size, void *vaddr, in xen_swiotlb_free_coherent() argument
339 if (hwdev && hwdev->coherent_dma_mask) in xen_swiotlb_free_coherent()
340 dma_mask = hwdev->coherent_dma_mask; in xen_swiotlb_free_coherent()
354 xen_free_coherent_pages(hwdev, size, vaddr, (dma_addr_t)phys, attrs); in xen_swiotlb_free_coherent()
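The free path mirrors the allocator: the same coherent_dma_mask fallback (lines 339-340) feeds a decision about how the buffer was set up, and the pages are finally handed back via xen_free_coherent_pages() (line 354). A sketch under those assumptions; the virt_to_phys() lookup and the xen_destroy_contiguous_region() step are not in the listing and are assumptions about the surrounding code:

static void
xen_swiotlb_free_coherent(struct device *hwdev, size_t size, void *vaddr,
                          dma_addr_t dev_addr, unsigned long attrs)
{
        u64 dma_mask = DMA_BIT_MASK(32);        /* assumed fallback, as in alloc */
        phys_addr_t phys = virt_to_phys(vaddr); /* assumed lookup */

        /* Lines 339-340: same mask selection as the allocation side. */
        if (hwdev && hwdev->coherent_dma_mask)
                dma_mask = hwdev->coherent_dma_mask;

        /*
         * Assumed: if the buffer was exchanged with Xen at allocation time
         * (it had to fit under dma_mask), hand the machine frames back first.
         */
        if (dev_addr + size - 1 <= dma_mask)
                xen_destroy_contiguous_region(phys, get_order(size));

        /* Line 354: release the underlying pages. */
        xen_free_coherent_pages(hwdev, size, vaddr, (dma_addr_t)phys, attrs);
}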
420 static void xen_swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_unmap_page() argument
427 if (!dev_is_dma_coherent(hwdev) && !(attrs & DMA_ATTR_SKIP_CPU_SYNC)) in xen_swiotlb_unmap_page()
428 xen_dma_sync_for_cpu(hwdev, dev_addr, paddr, size, dir); in xen_swiotlb_unmap_page()
432 swiotlb_tbl_unmap_single(hwdev, paddr, size, size, dir, attrs); in xen_swiotlb_unmap_page()
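For the streaming unmap, the listing shows the hwdev-dependent pieces: a cache sync back to the CPU for non-coherent devices unless the caller opted out (lines 427-428), then releasing the swiotlb bounce slot (line 432). The bus-to-phys translation and the bounce-buffer guard in the sketch below are assumed helpers, named here only for illustration:

static void xen_swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr,
                                   size_t size, enum dma_data_direction dir,
                                   unsigned long attrs)
{
        /* Assumed helper: translate the bus address back to a CPU physical one. */
        phys_addr_t paddr = xen_bus_to_phys(dev_addr);

        /* Lines 427-428: sync for the CPU on non-coherent devices. */
        if (!dev_is_dma_coherent(hwdev) && !(attrs & DMA_ATTR_SKIP_CPU_SYNC))
                xen_dma_sync_for_cpu(hwdev, dev_addr, paddr, size, dir);

        /* Line 432: release the bounce buffer, if one was used (assumed guard). */
        if (is_xen_swiotlb_buffer(dev_addr))
                swiotlb_tbl_unmap_single(hwdev, paddr, size, size, dir, attrs);
}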
466 xen_swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems, in xen_swiotlb_unmap_sg() argument
475 xen_swiotlb_unmap_page(hwdev, sg->dma_address, sg_dma_len(sg), in xen_swiotlb_unmap_sg()
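xen_swiotlb_unmap_sg() only needs hwdev in order to pass it along: the match at line 475 shows each scatterlist entry being unmapped individually. A sketch assuming the standard for_each_sg() iteration around that call:

static void
xen_swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems,
                     enum dma_data_direction dir, unsigned long attrs)
{
        struct scatterlist *sg;
        int i;

        /* Line 475: unmap every entry through the single-page path. */
        for_each_sg(sgl, sg, nelems, i)
                xen_swiotlb_unmap_page(hwdev, sg->dma_address, sg_dma_len(sg),
                                       dir, attrs);
}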
537 xen_swiotlb_dma_supported(struct device *hwdev, u64 mask) in xen_swiotlb_dma_supported() argument
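Only the signature of xen_swiotlb_dma_supported() appears in the listing. A plausible body, given the usual .dma_supported contract (return nonzero if every address the device might be handed, including a worst-case bounce through the swiotlb buffer, fits under mask); the xen_virt_to_bus() helper and the xen_io_tlb_end symbol are assumptions used for illustration:

static int
xen_swiotlb_dma_supported(struct device *hwdev, u64 mask)
{
        /* Assumed: the last byte of the Xen swiotlb bounce buffer is the worst case. */
        return xen_virt_to_bus(xen_io_tlb_end - 1) <= mask;
}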