Lines matching refs:dev_addr (references to dev_addr in drivers/xen/swiotlb-xen.c; the leading numbers are the kernel source line numbers)
284 dma_addr_t dev_addr; in xen_swiotlb_alloc_coherent() local
315 dev_addr = xen_phys_to_bus(phys); in xen_swiotlb_alloc_coherent()
316 if (((dev_addr + size - 1 <= dma_mask)) && in xen_swiotlb_alloc_coherent()
318 *dma_handle = dev_addr; in xen_swiotlb_alloc_coherent()
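
The three hits above are the coherent-allocation path: the freshly allocated buffer's pseudo-physical address is translated to a bus (machine) address, and that bus address is handed back through *dma_handle only if the device can actually reach it. Below is a condensed sketch of how those lines fit together, assuming a kernel of roughly the vintage these line numbers suggest; helpers that do not appear in the listing (xen_alloc_coherent_pages(), range_straddles_page_boundary(), xen_create_contiguous_region(), xen_free_coherent_pages()) are filled in from that assumption and their exact use differs between versions.

    /* Sketch only: relies on kernel-internal helpers, not buildable standalone. */
    void *ret;
    phys_addr_t phys;
    dma_addr_t dev_addr;                  /* line 284: local in xen_swiotlb_alloc_coherent() */
    int order = get_order(size);
    u64 dma_mask = DMA_BIT_MASK(32);      /* or hwdev->coherent_dma_mask if set */

    ret = xen_alloc_coherent_pages(hwdev, size, dma_handle, flags, attrs);
    if (!ret)
            return ret;

    phys = *dma_handle;                   /* pseudo-physical address of the new buffer */
    dev_addr = xen_phys_to_bus(phys);     /* line 315: translate to a bus/machine address */

    if (((dev_addr + size - 1 <= dma_mask)) &&            /* line 316: within the coherent mask */
        !range_straddles_page_boundary(phys, size))       /* ...and machine-contiguous */
            *dma_handle = dev_addr;                       /* line 318: publish the bus address */
    else if (xen_create_contiguous_region(phys, order,    /* otherwise swap for contiguous */
                                          fls64(dma_mask), dma_handle)) {
            xen_free_coherent_pages(hwdev, size, ret, (dma_addr_t)phys, attrs);
            return NULL;
    }
    memset(ret, 0, size);
    return ret;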
333 dma_addr_t dev_addr, unsigned long attrs) in xen_swiotlb_free_coherent() argument
344 phys = xen_bus_to_phys(dev_addr); in xen_swiotlb_free_coherent()
349 if (!WARN_ON((dev_addr + size - 1 > dma_mask) || in xen_swiotlb_free_coherent()
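
On the release side, dev_addr arrives as the argument declared at line 333 and is mapped back to a pseudo-physical address before the allocation-time exchange (if any) is undone. A minimal sketch of that path, with the same caveats as above; in the kernels assumed here the check also consults a PageXenRemapped flag recording whether an exchange actually happened, which is omitted for brevity:

    int order = get_order(size);
    u64 dma_mask = DMA_BIT_MASK(32);                /* or hwdev->coherent_dma_mask if set */
    phys_addr_t phys = xen_bus_to_phys(dev_addr);   /* line 344: bus/machine -> pseudo-physical */

    /*
     * Line 349: a buffer we returned always fits the mask and never straddles
     * a page boundary, so the WARN_ON flags callers handing back an address
     * that cannot have come from xen_swiotlb_alloc_coherent().
     */
    if (!WARN_ON((dev_addr + size - 1 > dma_mask) ||
                 range_straddles_page_boundary(phys, size)))
            xen_destroy_contiguous_region(phys, order);   /* undo the exchange */

    xen_free_coherent_pages(hwdev, size, vaddr, (dma_addr_t)phys, attrs);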
370 dma_addr_t dev_addr = xen_phys_to_bus(phys); in xen_swiotlb_map_page() local
378 if (dma_capable(dev, dev_addr, size) && in xen_swiotlb_map_page()
380 !xen_arch_need_swiotlb(dev, phys, dev_addr) && in xen_swiotlb_map_page()
387 trace_swiotlb_bounced(dev, dev_addr, size, swiotlb_force); in xen_swiotlb_map_page()
395 dev_addr = xen_phys_to_bus(map); in xen_swiotlb_map_page()
400 if (unlikely(!dma_capable(dev, dev_addr, size))) { in xen_swiotlb_map_page()
408 xen_dma_sync_for_device(dev, dev_addr, phys, size, dir); in xen_swiotlb_map_page()
409 return dev_addr; in xen_swiotlb_map_page()
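
The streaming map path is where dev_addr does double duty: first as the candidate bus address of the caller's buffer, then, if that address is unusable, as the bus address of a swiotlb bounce slot. A condensed sketch of how the hits above connect, assuming the surrounding helpers of that era (range_straddles_page_boundary(), swiotlb_tbl_map_single()/swiotlb_tbl_unmap_single() with the older argument lists, dev_is_dma_coherent(), and the file-local start_dma_addr of the bounce pool); those details have changed in later kernels:

    phys_addr_t map, phys = page_to_phys(page) + offset;
    dma_addr_t dev_addr = xen_phys_to_bus(phys);          /* line 370 */

    /* Fast path (lines 378-380): the device can reach the buffer as-is. */
    if (dma_capable(dev, dev_addr, size) &&
        !range_straddles_page_boundary(phys, size) &&
        !xen_arch_need_swiotlb(dev, phys, dev_addr) &&
        swiotlb_force != SWIOTLB_FORCE)
            goto done;

    /* Slow path: bounce through the swiotlb pool. */
    trace_swiotlb_bounced(dev, dev_addr, size, swiotlb_force);   /* line 387 */
    map = swiotlb_tbl_map_single(dev, start_dma_addr, phys, size, size, dir, attrs);
    if (map == (phys_addr_t)DMA_MAPPING_ERROR)
            return DMA_MAPPING_ERROR;

    phys = map;
    dev_addr = xen_phys_to_bus(map);                      /* line 395: bus addr of bounce slot */
    if (unlikely(!dma_capable(dev, dev_addr, size))) {    /* line 400: still unreachable? */
            swiotlb_tbl_unmap_single(dev, map, size, size, dir,
                                     attrs | DMA_ATTR_SKIP_CPU_SYNC);
            return DMA_MAPPING_ERROR;
    }

done:
    if (!dev_is_dma_coherent(dev) && !(attrs & DMA_ATTR_SKIP_CPU_SYNC))
            xen_dma_sync_for_device(dev, dev_addr, phys, size, dir);  /* line 408 */
    return dev_addr;                                      /* line 409 */

Either way, the value returned at line 409 is a bus address, which is why the unmap path below starts by translating it back.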
420 static void xen_swiotlb_unmap_page(struct device *hwdev, dma_addr_t dev_addr, in xen_swiotlb_unmap_page() argument
423 phys_addr_t paddr = xen_bus_to_phys(dev_addr); in xen_swiotlb_unmap_page()
428 xen_dma_sync_for_cpu(hwdev, dev_addr, paddr, size, dir); in xen_swiotlb_unmap_page()
431 if (is_xen_swiotlb_buffer(dev_addr)) in xen_swiotlb_unmap_page()
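
Unmapping reverses the translation: dev_addr (the argument at line 420) is converted back to a physical address, cache maintenance is done while both addresses are still known, and the bounce slot is released only if dev_addr actually points into the swiotlb pool. Sketch, with the same caveats on helper signatures as above:

    phys_addr_t paddr = xen_bus_to_phys(dev_addr);        /* line 423 */

    if (!dev_is_dma_coherent(hwdev) && !(attrs & DMA_ATTR_SKIP_CPU_SYNC))
            xen_dma_sync_for_cpu(hwdev, dev_addr, paddr, size, dir);  /* line 428 */

    /* Line 431: note the test is on the bus address, not on paddr. */
    if (is_xen_swiotlb_buffer(dev_addr))
            swiotlb_tbl_unmap_single(hwdev, paddr, size, size, dir, attrs);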