Searched refs:DMA_ATTR_WEAK_ORDERING (Results 1 – 11 of 11) sorted by relevance
arch/powerpc/platforms/cell/iommu.c
    196   if (unlikely(attrs & DMA_ATTR_WEAK_ORDERING))                 in tce_build_cell()
    603   if (iommu_fixed_is_weak == (attrs & DMA_ATTR_WEAK_ORDERING))  in dma_fixed_map_page()
    616   if (iommu_fixed_is_weak == (attrs & DMA_ATTR_WEAK_ORDERING))  in dma_fixed_unmap_page()
    628   if (iommu_fixed_is_weak == (attrs & DMA_ATTR_WEAK_ORDERING))  in dma_fixed_map_sg()
    640   if (iommu_fixed_is_weak == (attrs & DMA_ATTR_WEAK_ORDERING))  in dma_fixed_unmap_sg()
    1146  iommu_fixed_is_weak = DMA_ATTR_WEAK_ORDERING;                 in setup_iommu_fixed()
DMA attributes documentation
    26  DMA_ATTR_WEAK_ORDERING
    29  DMA_ATTR_WEAK_ORDERING specifies that reads and writes to the mapping
    32  Since it is optional for platforms to implement DMA_ATTR_WEAK_ORDERING,
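The documentation hits above give the semantics: the attribute tells the DMA layer that reads and writes through the mapping may be weakly ordered, and platforms that do not implement it simply ignore it. A minimal, hypothetical sketch of a driver opting in on a streaming mapping follows; the function names and error handling are illustrative and not taken from any file in this listing, only the dma_map_single_attrs()/dma_unmap_single_attrs() calls and the attribute itself are real kernel API.

    /*
     * Illustrative sketch only: map a driver-owned buffer and hint that
     * weakly ordered DMA accesses are acceptable.  Platforms that do not
     * implement DMA_ATTR_WEAK_ORDERING silently ignore the attribute.
     */
    #include <linux/dma-mapping.h>

    static dma_addr_t example_map_buf(struct device *dev, void *buf, size_t len)
    {
            dma_addr_t addr;

            addr = dma_map_single_attrs(dev, buf, len, DMA_BIDIRECTIONAL,
                                        DMA_ATTR_WEAK_ORDERING);
            if (dma_mapping_error(dev, addr))
                    return DMA_MAPPING_ERROR;
            return addr;
    }

    static void example_unmap_buf(struct device *dev, dma_addr_t addr, size_t len)
    {
            /* Pass the same attrs on unmap, as the bnxt hits below do. */
            dma_unmap_single_attrs(dev, addr, len, DMA_BIDIRECTIONAL,
                                   DMA_ATTR_WEAK_ORDERING);
    }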
103 (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)
123 (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)
arch/sparc/kernel/pci_sun4v.c
    196  if (attrs & DMA_ATTR_WEAK_ORDERING)  in dma_4v_alloc_coherent()
    395  if (attrs & DMA_ATTR_WEAK_ORDERING)  in dma_4v_map_page()
    493  if (attrs & DMA_ATTR_WEAK_ORDERING)  in dma_4v_map_sg()
315 (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)
152 (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)
drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c
    598  DMA_ATTR_WEAK_ORDERING |  in gk20a_instmem_new()
drivers/net/ethernet/broadcom/bnxt/bnxt.c
    632   DMA_ATTR_WEAK_ORDERING);              in __bnxt_alloc_rx_page()
    653   DMA_ATTR_WEAK_ORDERING);              in __bnxt_alloc_rx_data()
    758   DMA_ATTR_WEAK_ORDERING);              in bnxt_alloc_rx_page()
    852   DMA_ATTR_WEAK_ORDERING);              in bnxt_rx_page_skb()
    895   bp->rx_dir, DMA_ATTR_WEAK_ORDERING);  in bnxt_rx_skb()
    964   DMA_ATTR_WEAK_ORDERING);              in bnxt_rx_pages()
    1416  DMA_ATTR_WEAK_ORDERING);              in bnxt_tpa_end()
    2141  DMA_ATTR_WEAK_ORDERING);              in bnxt_free_rx_skbs()
    2163  DMA_ATTR_WEAK_ORDERING);              in bnxt_free_rx_skbs()
    2169  DMA_ATTR_WEAK_ORDERING);              in bnxt_free_rx_skbs()
    [all …]
138 (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)
include/linux/dma-mapping.h
    27  #define DMA_ATTR_WEAK_ORDERING  (1UL << 1)  macro
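Several of the hits above, the five "(DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)" defines and the bnxt calls, follow the same receive-buffer pattern: the driver defines one attribute mask for RX mappings and passes it to every dma_map_page_attrs()/dma_unmap_page_attrs() call. The sketch below is hypothetical; EXAMPLE_RX_DMA_ATTR and the helper names are invented for illustration and are not from any of the drivers listed.

    /*
     * Hypothetical RX-buffer helpers modeled on the pattern visible in the
     * network-driver hits above.  DMA_ATTR_SKIP_CPU_SYNC defers cache
     * maintenance to explicit dma_sync_*() calls and DMA_ATTR_WEAK_ORDERING
     * allows relaxed-ordering DMA where the platform supports it.
     */
    #include <linux/dma-mapping.h>
    #include <linux/gfp.h>

    #define EXAMPLE_RX_DMA_ATTR  (DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING)

    static struct page *example_alloc_rx_page(struct device *dev,
                                              dma_addr_t *mapping)
    {
            struct page *page = alloc_page(GFP_ATOMIC);

            if (!page)
                    return NULL;

            *mapping = dma_map_page_attrs(dev, page, 0, PAGE_SIZE,
                                          DMA_FROM_DEVICE, EXAMPLE_RX_DMA_ATTR);
            if (dma_mapping_error(dev, *mapping)) {
                    __free_page(page);
                    return NULL;
            }
            return page;
    }

    static void example_free_rx_page(struct device *dev, struct page *page,
                                     dma_addr_t mapping)
    {
            /* Unmap with the same attrs that were used for mapping. */
            dma_unmap_page_attrs(dev, mapping, PAGE_SIZE, DMA_FROM_DEVICE,
                                 EXAMPLE_RX_DMA_ATTR);
            __free_page(page);
    }

Because DMA_ATTR_SKIP_CPU_SYNC suppresses cache maintenance at map and unmap time, a driver using such a mask must call dma_sync_single_for_cpu() on the portion of the buffer it actually consumes before reading the received data.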