Lines matching references to peer in the Tegra IVC code:
81 if (!ivc->peer) in tegra_ivc_invalidate()
84 dma_sync_single_for_cpu(ivc->peer, phys, TEGRA_IVC_ALIGN, in tegra_ivc_invalidate()
90 if (!ivc->peer) in tegra_ivc_flush()
93 dma_sync_single_for_device(ivc->peer, phys, TEGRA_IVC_ALIGN, in tegra_ivc_flush()
252 if (!ivc->peer || WARN_ON(frame >= ivc->num_frames)) in tegra_ivc_invalidate_frame()
257 dma_sync_single_for_cpu(ivc->peer, phys, size, DMA_FROM_DEVICE); in tegra_ivc_invalidate_frame()
266 if (!ivc->peer || WARN_ON(frame >= ivc->num_frames)) in tegra_ivc_flush_frame()
271 dma_sync_single_for_device(ivc->peer, phys, size, DMA_TO_DEVICE); in tegra_ivc_flush_frame()
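
The four sync sites above share one guard: cache maintenance runs against the mapping owned by ivc->peer and is skipped entirely when no peer device was supplied. Below is a minimal sketch of that pattern; the struct and function names are placeholders, and only the peer check plus the dma_sync_single_for_cpu()/dma_sync_single_for_device() calls are taken from the fragments (the frame-bounds check and address computation are elided).

#include <linux/device.h>
#include <linux/dma-mapping.h>

/* Illustrative only: names here are placeholders, not the driver's own;
 * the guard and the two sync calls mirror the fragments above. */
struct ivc_sketch {
        struct device *peer;    /* NULL when no cache maintenance is needed */
};

/* Make data written by the remote side visible to the CPU. */
static void ivc_sketch_invalidate(struct ivc_sketch *ivc, dma_addr_t phys,
                                  size_t size)
{
        if (!ivc->peer)
                return;

        dma_sync_single_for_cpu(ivc->peer, phys, size, DMA_FROM_DEVICE);
}

/* Push CPU writes out so the remote side observes them. */
static void ivc_sketch_flush(struct ivc_sketch *ivc, dma_addr_t phys,
                             size_t size)
{
        if (!ivc->peer)
                return;

        dma_sync_single_for_device(ivc->peer, phys, size, DMA_TO_DEVICE);
}
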
620 int tegra_ivc_init(struct tegra_ivc *ivc, struct device *peer, void *rx, in tegra_ivc_init() argument
646 if (peer) { in tegra_ivc_init()
647 ivc->rx.phys = dma_map_single(peer, rx, queue_size, in tegra_ivc_init()
649 if (dma_mapping_error(peer, ivc->rx.phys)) in tegra_ivc_init()
652 ivc->tx.phys = dma_map_single(peer, tx, queue_size, in tegra_ivc_init()
654 if (dma_mapping_error(peer, ivc->tx.phys)) { in tegra_ivc_init()
655 dma_unmap_single(peer, ivc->rx.phys, queue_size, in tegra_ivc_init()
666 ivc->peer = peer; in tegra_ivc_init()
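
The init fragments (lines 646-666) show the mapping half of the lifecycle: when a peer device is passed in, the RX and TX queues are each mapped with dma_map_single(), a failure to map the TX queue unwinds the RX mapping, and the device pointer is then stored in ivc->peer for the sync and unmap sites. A hedged sketch of that flow follows; the function name, the -ENOMEM return and the DMA_BIDIRECTIONAL direction are assumptions, since the fragments truncate before those details.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

/* Sketch of the mapping step: map both queues against the peer device,
 * unwinding the RX mapping if the TX map fails. Parameter names, the
 * error code and the DMA direction are illustrative assumptions. */
static int ivc_sketch_map_queues(struct device *peer, void *rx, void *tx,
                                 size_t queue_size,
                                 dma_addr_t *rx_phys, dma_addr_t *tx_phys)
{
        if (!peer)
                return 0;       /* no peer: the caller skips the DMA API */

        *rx_phys = dma_map_single(peer, rx, queue_size, DMA_BIDIRECTIONAL);
        if (dma_mapping_error(peer, *rx_phys))
                return -ENOMEM;

        *tx_phys = dma_map_single(peer, tx, queue_size, DMA_BIDIRECTIONAL);
        if (dma_mapping_error(peer, *tx_phys)) {
                /* Undo the RX mapping before reporting the failure. */
                dma_unmap_single(peer, *rx_phys, queue_size,
                                 DMA_BIDIRECTIONAL);
                return -ENOMEM;
        }

        return 0;
}
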
685 if (ivc->peer) { in tegra_ivc_cleanup()
689 dma_unmap_single(ivc->peer, ivc->rx.phys, size, in tegra_ivc_cleanup()
691 dma_unmap_single(ivc->peer, ivc->tx.phys, size, in tegra_ivc_cleanup()
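
tegra_ivc_cleanup() mirrors the init path: if a peer device was recorded, both queue mappings are released with dma_unmap_single(); with no peer there is nothing to undo. A sketch under the same assumptions as above (the size and direction arguments are not visible in the fragments):

#include <linux/device.h>
#include <linux/dma-mapping.h>

/* Hedged sketch of the teardown: undo the two dma_map_single() calls made
 * at init time. Size and direction are assumptions; the fragments above
 * only show the device, the addresses and a size variable. */
static void ivc_sketch_unmap_queues(struct device *peer,
                                    dma_addr_t rx_phys, dma_addr_t tx_phys,
                                    size_t queue_size)
{
        if (!peer)
                return; /* nothing was mapped without a peer device */

        dma_unmap_single(peer, rx_phys, queue_size, DMA_BIDIRECTIONAL);
        dma_unmap_single(peer, tx_phys, queue_size, DMA_BIDIRECTIONAL);
}

Taken together, the references suggest that peer acts as an optional DMA device handle: a NULL peer turns every mapping and cache-maintenance site into a no-op, presumably for configurations where the shared memory is already coherent.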