Lines matching refs: dma_dom (all hits in the AMD IOMMU driver, drivers/iommu/amd_iommu.c)
1765 struct dma_ops_domain *dma_dom, in dma_ops_alloc_iova() argument
1773 pfn = alloc_iova_fast(&dma_dom->iovad, pages, in dma_ops_alloc_iova()
1777 pfn = alloc_iova_fast(&dma_dom->iovad, pages, in dma_ops_alloc_iova()
1783 static void dma_ops_free_iova(struct dma_ops_domain *dma_dom, in dma_ops_free_iova() argument
1790 free_iova_fast(&dma_dom->iovad, address, pages); in dma_ops_free_iova()
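
The two alloc_iova_fast() calls at 1773 and 1777 are a two-stage allocation: try the range below 4 GiB first, and only on failure retry up to the device's full DMA mask, this time letting the allocator flush its per-CPU rcaches. A minimal sketch of the pair as it appears in mainline amd_iommu.c of this era (IOVA_PFN(), __roundup_pow_of_two() and the DMA_BIT_MASK(32) limit are assumptions from that tree, not shown in the rows above):

static unsigned long dma_ops_alloc_iova(struct device *dev,
					struct dma_ops_domain *dma_dom,
					unsigned int pages, u64 dma_mask)
{
	unsigned long pfn = 0;

	pages = __roundup_pow_of_two(pages);

	/* First pass: stay below 4 GiB; don't flush the per-CPU
	 * rcaches on failure, since we retry with a larger limit. */
	if (dma_mask > DMA_BIT_MASK(32))
		pfn = alloc_iova_fast(&dma_dom->iovad, pages,
				      IOVA_PFN(DMA_BIT_MASK(32)), false);

	/* Second pass (line 1777): retry against the full DMA mask. */
	if (!pfn)
		pfn = alloc_iova_fast(&dma_dom->iovad, pages,
				      IOVA_PFN(dma_mask), true);

	return (pfn << PAGE_SHIFT);	/* 0 means allocation failed */
}

static void dma_ops_free_iova(struct dma_ops_domain *dma_dom,
			      unsigned long address,
			      unsigned int pages)
{
	pages = __roundup_pow_of_two(pages);
	address >>= PAGE_SHIFT;

	free_iova_fast(&dma_dom->iovad, address, pages);
}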
1914 struct dma_ops_domain *dma_dom; in dma_ops_domain_alloc() local
1916 dma_dom = kzalloc(sizeof(struct dma_ops_domain), GFP_KERNEL); in dma_ops_domain_alloc()
1917 if (!dma_dom) in dma_ops_domain_alloc()
1920 if (protection_domain_init(&dma_dom->domain)) in dma_ops_domain_alloc()
1923 dma_dom->domain.mode = PAGE_MODE_3_LEVEL; in dma_ops_domain_alloc()
1924 dma_dom->domain.pt_root = (void *)get_zeroed_page(GFP_KERNEL); in dma_ops_domain_alloc()
1925 dma_dom->domain.flags = PD_DMA_OPS_MASK; in dma_ops_domain_alloc()
1926 if (!dma_dom->domain.pt_root) in dma_ops_domain_alloc()
1929 init_iova_domain(&dma_dom->iovad, PAGE_SIZE, IOVA_START_PFN); in dma_ops_domain_alloc()
1931 if (init_iova_flush_queue(&dma_dom->iovad, iova_domain_flush_tlb, NULL)) in dma_ops_domain_alloc()
1935 copy_reserved_iova(&reserved_iova_ranges, &dma_dom->iovad); in dma_ops_domain_alloc()
1937 return dma_dom; in dma_ops_domain_alloc()
1940 dma_ops_domain_free(dma_dom); in dma_ops_domain_alloc()
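
Rows 1914-1940 give nearly the whole constructor; stitched together into one readable unit (only the early returns and the goto label are assumed, everything else is quoted above):

static struct dma_ops_domain *dma_ops_domain_alloc(void)
{
	struct dma_ops_domain *dma_dom;

	dma_dom = kzalloc(sizeof(struct dma_ops_domain), GFP_KERNEL);
	if (!dma_dom)
		return NULL;

	if (protection_domain_init(&dma_dom->domain))
		goto free_dma_dom;

	/* 3-level page table, flagged as a DMA-ops domain */
	dma_dom->domain.mode = PAGE_MODE_3_LEVEL;
	dma_dom->domain.pt_root = (void *)get_zeroed_page(GFP_KERNEL);
	dma_dom->domain.flags = PD_DMA_OPS_MASK;
	if (!dma_dom->domain.pt_root)
		goto free_dma_dom;

	init_iova_domain(&dma_dom->iovad, PAGE_SIZE, IOVA_START_PFN);

	if (init_iova_flush_queue(&dma_dom->iovad, iova_domain_flush_tlb, NULL))
		goto free_dma_dom;

	/* Punch the globally reserved ranges out of this domain's space */
	copy_reserved_iova(&reserved_iova_ranges, &dma_dom->iovad);

	return dma_dom;

free_dma_dom:
	dma_ops_domain_free(dma_dom);

	return NULL;
}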
2424 struct dma_ops_domain *dma_dom, in __map_single() argument
2440 address = dma_ops_alloc_iova(dev, dma_dom, pages, dma_mask); in __map_single()
2448 ret = iommu_map_page(&dma_dom->domain, start, paddr, in __map_single()
2458 domain_flush_np_cache(&dma_dom->domain, address, size); in __map_single()
2467 iommu_unmap_page(&dma_dom->domain, start, PAGE_SIZE); in __map_single()
2470 spin_lock_irqsave(&dma_dom->domain.lock, flags); in __map_single()
2471 domain_flush_tlb(&dma_dom->domain); in __map_single()
2472 domain_flush_complete(&dma_dom->domain); in __map_single()
2473 spin_unlock_irqrestore(&dma_dom->domain.lock, flags); in __map_single()
2475 dma_ops_free_iova(dma_dom, address, pages); in __map_single()
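
Rows 2424-2475 are the heart of the map path: carve an IOVA window, install a PTE per page, and on any iommu_map_page() failure unwind the pages already mapped, flush the TLB under the domain lock, and give the window back. A condensed sketch, assuming the mainline helpers iommu_num_pages() and dir2prot():

static dma_addr_t __map_single(struct device *dev,
			       struct dma_ops_domain *dma_dom,
			       phys_addr_t paddr, size_t size,
			       enum dma_data_direction direction, u64 dma_mask)
{
	dma_addr_t offset = paddr & ~PAGE_MASK;
	dma_addr_t address, start;
	unsigned long flags;
	unsigned int pages;
	int prot, i;

	pages = iommu_num_pages(paddr, size, PAGE_SIZE);
	paddr &= PAGE_MASK;

	address = dma_ops_alloc_iova(dev, dma_dom, pages, dma_mask);
	if (!address)
		return DMA_MAPPING_ERROR;

	prot  = dir2prot(direction);
	start = address;
	for (i = 0; i < pages; ++i) {
		if (iommu_map_page(&dma_dom->domain, start, paddr,
				   PAGE_SIZE, prot, GFP_ATOMIC))
			goto out_unmap;
		paddr += PAGE_SIZE;
		start += PAGE_SIZE;
	}

	/* Flush not-present cache entries covering the new mapping */
	domain_flush_np_cache(&dma_dom->domain, address + offset, size);

	return address + offset;

out_unmap:
	/* Unwind the pages mapped so far, flush, return the window */
	for (--i; i >= 0; --i) {
		start -= PAGE_SIZE;
		iommu_unmap_page(&dma_dom->domain, start, PAGE_SIZE);
	}

	spin_lock_irqsave(&dma_dom->domain.lock, flags);
	domain_flush_tlb(&dma_dom->domain);
	domain_flush_complete(&dma_dom->domain);
	spin_unlock_irqrestore(&dma_dom->domain.lock, flags);

	dma_ops_free_iova(dma_dom, address, pages);

	return DMA_MAPPING_ERROR;
}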
2484 static void __unmap_single(struct dma_ops_domain *dma_dom, in __unmap_single() argument
2497 iommu_unmap_page(&dma_dom->domain, start, PAGE_SIZE); in __unmap_single()
2504 spin_lock_irqsave(&dma_dom->domain.lock, flags); in __unmap_single()
2505 domain_flush_tlb(&dma_dom->domain); in __unmap_single()
2506 domain_flush_complete(&dma_dom->domain); in __unmap_single()
2507 spin_unlock_irqrestore(&dma_dom->domain.lock, flags); in __unmap_single()
2508 dma_ops_free_iova(dma_dom, dma_addr, pages); in __unmap_single()
2511 queue_iova(&dma_dom->iovad, dma_addr >> PAGE_SHIFT, pages, 0); in __unmap_single()
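
Rows 2484-2508 versus row 2511 show the two tear-down policies: flush synchronously and free the IOVA window immediately, or defer it through the IOVA flush queue. In mainline the choice hangs on the amd_iommu_unmap_flush toggle (an assumption here; the rows only show both branches):

static void __unmap_single(struct dma_ops_domain *dma_dom,
			   dma_addr_t dma_addr, size_t size, int dir)
{
	dma_addr_t i, start;
	unsigned int pages;

	pages = iommu_num_pages(dma_addr, size, PAGE_SIZE);
	dma_addr &= PAGE_MASK;
	start = dma_addr;

	for (i = 0; i < pages; ++i) {
		iommu_unmap_page(&dma_dom->domain, start, PAGE_SIZE);
		start += PAGE_SIZE;
	}

	if (amd_iommu_unmap_flush) {
		/* Strict mode: flush now, free the IOVA window now */
		unsigned long flags;

		spin_lock_irqsave(&dma_dom->domain.lock, flags);
		domain_flush_tlb(&dma_dom->domain);
		domain_flush_complete(&dma_dom->domain);
		spin_unlock_irqrestore(&dma_dom->domain.lock, flags);
		dma_ops_free_iova(dma_dom, dma_addr, pages);
	} else {
		/* Lazy mode: queue the range; the flush-queue callback
		 * (iova_domain_flush_tlb above) reclaims it later */
		pages = __roundup_pow_of_two(pages);
		queue_iova(&dma_dom->iovad, dma_addr >> PAGE_SHIFT, pages, 0);
	}
}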
2525 struct dma_ops_domain *dma_dom; in map_page() local
2535 dma_dom = to_dma_ops_domain(domain); in map_page()
2537 return __map_single(dev, dma_dom, paddr, size, dir, dma_mask); in map_page()
2547 struct dma_ops_domain *dma_dom; in unmap_page() local
2553 dma_dom = to_dma_ops_domain(domain); in unmap_page()
2555 __unmap_single(dma_dom, dma_addr, size, dir); in unmap_page()
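
map_page()/unmap_page() (rows 2525-2555) are thin dma_map_ops entry points: resolve the device's protection domain, convert it with to_dma_ops_domain(), delegate to __map_single()/__unmap_single(). A sketch of the map side, with get_domain() and the identity-map fallback assumed from mainline:

static dma_addr_t map_page(struct device *dev, struct page *page,
			   unsigned long offset, size_t size,
			   enum dma_data_direction dir, unsigned long attrs)
{
	phys_addr_t paddr = page_to_phys(page) + offset;
	struct protection_domain *domain;
	struct dma_ops_domain *dma_dom;

	domain = get_domain(dev);
	if (PTR_ERR(domain) == -EINVAL)	/* device uses identity mapping */
		return (dma_addr_t)paddr;
	else if (IS_ERR(domain))
		return DMA_MAPPING_ERROR;

	dma_dom = to_dma_ops_domain(domain);

	return __map_single(dev, dma_dom, paddr, size, dir, *dev->dma_mask);
}

unmap_page() mirrors this: get_domain(), to_dma_ops_domain(), then the __unmap_single() call shown at row 2555.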
2594 struct dma_ops_domain *dma_dom; in map_sg() local
2604 dma_dom = to_dma_ops_domain(domain); in map_sg()
2609 address = dma_ops_alloc_iova(dev, dma_dom, npages, dma_mask); in map_sg()
2669 free_iova_fast(&dma_dom->iovad, address >> PAGE_SHIFT, npages); in map_sg()
2684 struct dma_ops_domain *dma_dom; in unmap_sg() local
2693 dma_dom = to_dma_ops_domain(domain); in unmap_sg()
2696 __unmap_single(dma_dom, startaddr, npages << PAGE_SHIFT, dir); in unmap_sg()
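
map_sg() (rows 2594-2669) allocates a single IOVA window for the whole scatterlist up front and, when mapping any page fails, hands the entire window back with free_iova_fast() (row 2669) instead of freeing per page. The unmap side is short enough to sketch whole; sg_num_pages() is an internal helper assumed from mainline:

static void unmap_sg(struct device *dev, struct scatterlist *sglist,
		     int nelems, enum dma_data_direction dir,
		     unsigned long attrs)
{
	struct protection_domain *domain;
	struct dma_ops_domain *dma_dom;
	unsigned long startaddr;
	int npages;

	domain = get_domain(dev);
	if (IS_ERR(domain))
		return;

	startaddr = sg_dma_address(sglist) & PAGE_MASK;
	dma_dom   = to_dma_ops_domain(domain);
	npages    = sg_num_pages(dev, sglist, nelems);

	/* One contiguous IOVA window covers the whole list (row 2696) */
	__unmap_single(dma_dom, startaddr, npages << PAGE_SHIFT, dir);
}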
2708 struct dma_ops_domain *dma_dom; in alloc_coherent() local
2719 dma_dom = to_dma_ops_domain(domain); in alloc_coherent()
2739 *dma_addr = __map_single(dev, dma_dom, page_to_phys(page), in alloc_coherent()
2763 struct dma_ops_domain *dma_dom; in free_coherent() local
2773 dma_dom = to_dma_ops_domain(domain); in free_coherent()
2775 __unmap_single(dma_dom, dma_addr, size, DMA_BIDIRECTIONAL); in free_coherent()
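
alloc_coherent()/free_coherent() (rows 2708-2775) reuse the same machinery: allocate the backing pages, then push them through __map_single() with DMA_BIDIRECTIONAL; freeing is __unmap_single() plus releasing the pages. A trimmed sketch of the allocation core, with mainline's identity-map fallback and CMA retry path omitted:

static void *alloc_coherent(struct device *dev, size_t size,
			    dma_addr_t *dma_addr, gfp_t flag,
			    unsigned long attrs)
{
	struct protection_domain *domain = get_domain(dev);
	struct dma_ops_domain *dma_dom;
	struct page *page;

	if (IS_ERR(domain))
		return NULL;

	dma_dom = to_dma_ops_domain(domain);
	size    = PAGE_ALIGN(size);

	page = alloc_pages(flag | __GFP_ZERO, get_order(size));
	if (!page)
		return NULL;

	/* Coherent buffers are mapped for both directions */
	*dma_addr = __map_single(dev, dma_dom, page_to_phys(page),
				 size, DMA_BIDIRECTIONAL,
				 dev->coherent_dma_mask);
	if (*dma_addr == DMA_MAPPING_ERROR) {
		__free_pages(page, get_order(size));
		return NULL;
	}

	return page_address(page);
}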
3015 struct dma_ops_domain *dma_dom; in amd_iommu_domain_free() local
3030 dma_dom = to_dma_ops_domain(domain); in amd_iommu_domain_free()
3031 dma_ops_domain_free(dma_dom); in amd_iommu_domain_free()
3254 struct dma_ops_domain *dma_dom = to_dma_ops_domain(to_pdomain(domain)); in amd_iommu_apply_resv_region() local
3260 WARN_ON_ONCE(reserve_iova(&dma_dom->iovad, start, end) == NULL); in amd_iommu_apply_resv_region()
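
Finally, the reservation hook (rows 3254-3260) punches firmware-reserved windows out of the domain's IOVA space so dma_ops_alloc_iova() can never hand them out. Sketch, assuming the struct iommu_resv_region fields and the IOVA_PFN() conversion from mainline:

static void amd_iommu_apply_resv_region(struct device *dev,
					struct iommu_domain *domain,
					struct iommu_resv_region *region)
{
	struct dma_ops_domain *dma_dom = to_dma_ops_domain(to_pdomain(domain));
	unsigned long start, end;

	start = IOVA_PFN(region->start);
	end   = IOVA_PFN(region->start + region->length - 1);

	/* reserve_iova() returns NULL if the range can't be reserved */
	WARN_ON_ONCE(reserve_iova(&dma_dom->iovad, start, end) == NULL);
}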