Lines matching refs: dma_coherent

665 	void *dma_coherent;  in arcmsr_alloc_io_queue()  local
672 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
674 if (!dma_coherent) { in arcmsr_alloc_io_queue()
679 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
680 acb->pmuB = (struct MessageUnit_B *)dma_coherent; in arcmsr_alloc_io_queue()
686 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
688 if (!dma_coherent) { in arcmsr_alloc_io_queue()
693 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
694 acb->pmuD = (struct MessageUnit_D *)dma_coherent; in arcmsr_alloc_io_queue()
702 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
704 if (!dma_coherent){ in arcmsr_alloc_io_queue()
709 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
710 acb->pCompletionQ = dma_coherent; in arcmsr_alloc_io_queue()
723 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size, in arcmsr_alloc_io_queue()
725 if (!dma_coherent) { in arcmsr_alloc_io_queue()
730 acb->dma_coherent2 = dma_coherent; in arcmsr_alloc_io_queue()
731 acb->pCompletionQ = dma_coherent; in arcmsr_alloc_io_queue()
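
All of the arcmsr_alloc_io_queue() matches above follow one pattern: a single coherent DMA region of acb->ioqueue_size bytes is allocated per adapter, the CPU pointer is kept in acb->dma_coherent2, and the same region is then viewed as the adapter-specific structure (pmuB, pmuD, or pCompletionQ). The following is a minimal sketch of that pattern, not the driver itself: only the field names visible in the matched lines are taken from the source, while example_acb, dma_coherent_handle2, and the error handling are illustrative assumptions.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/pci.h>

struct MessageUnit_B;                    /* opaque stand-in, illustrative only */

struct example_acb {                     /* trimmed stand-in for the driver's acb */
	struct pci_dev *pdev;
	unsigned int ioqueue_size;
	dma_addr_t dma_coherent_handle2; /* assumed field, not in the matches above */
	void *dma_coherent2;
	struct MessageUnit_B *pmuB;
};

static int example_alloc_io_queue(struct example_acb *acb)
{
	struct pci_dev *pdev = acb->pdev;
	dma_addr_t dma_coherent_handle;
	void *dma_coherent;

	/* One coherent region per adapter holds the message unit / queues. */
	dma_coherent = dma_alloc_coherent(&pdev->dev, acb->ioqueue_size,
					  &dma_coherent_handle, GFP_KERNEL);
	if (!dma_coherent)
		return -ENOMEM;

	/*
	 * Keep the bus handle for the eventual dma_free_coherent() and the
	 * CPU pointer for driver-side access; the hardware-specific view is
	 * just a cast of the same region (pmuB here, pmuD or a completion
	 * queue for the other adapter types in the listing).
	 */
	acb->dma_coherent_handle2 = dma_coherent_handle;
	acb->dma_coherent2 = dma_coherent;
	acb->pmuB = (struct MessageUnit_B *)dma_coherent;
	return 0;
}
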
746 void *dma_coherent; in arcmsr_alloc_ccb_pool() local
773 dma_coherent = dma_alloc_coherent(&pdev->dev, acb->uncache_size, &dma_coherent_handle, GFP_KERNEL); in arcmsr_alloc_ccb_pool()
774 if(!dma_coherent){ in arcmsr_alloc_ccb_pool()
778 acb->dma_coherent = dma_coherent; in arcmsr_alloc_ccb_pool()
780 memset(dma_coherent, 0, acb->uncache_size); in arcmsr_alloc_ccb_pool()
782 ccb_tmp = dma_coherent; in arcmsr_alloc_ccb_pool()
784 acb->vir2phy_offset = (unsigned long)dma_coherent - (unsigned long)dma_coherent_handle; in arcmsr_alloc_ccb_pool()
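
The arcmsr_alloc_ccb_pool() matches show the same call used for the command-control-block pool: one large coherent allocation is zeroed, carved into CCBs, and the driver records vir2phy_offset, the constant difference between the pool's kernel virtual address and its bus address, so addresses inside the pool can be translated in either direction with one addition or subtraction. A sketch of that idea, assuming a made-up CCB layout and pool size; the loop at the end is purely an illustrative use of the offset.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/pci.h>
#include <linux/string.h>

struct example_ccb {                 /* stand-in for the driver's CCB layout */
	u32 cdb_phyaddr;             /* bus address handed to the firmware */
	u8  cdb[16];                 /* ... rest of the command block ... */
};

static int example_alloc_ccb_pool(struct pci_dev *pdev, unsigned int nr_ccbs)
{
	size_t uncache_size = nr_ccbs * sizeof(struct example_ccb);
	dma_addr_t dma_coherent_handle;
	unsigned long vir2phy_offset;
	struct example_ccb *ccb_tmp;
	void *dma_coherent;
	unsigned int i;

	dma_coherent = dma_alloc_coherent(&pdev->dev, uncache_size,
					  &dma_coherent_handle, GFP_KERNEL);
	if (!dma_coherent)
		return -ENOMEM;

	memset(dma_coherent, 0, uncache_size);
	ccb_tmp = dma_coherent;

	/*
	 * The pool is one contiguous coherent region, so the difference
	 * between its CPU address and its bus address is a constant that
	 * translates any address inside the pool.
	 */
	vir2phy_offset = (unsigned long)dma_coherent -
			 (unsigned long)dma_coherent_handle;

	/* Illustrative: derive each CCB's bus address from its CPU pointer. */
	for (i = 0; i < nr_ccbs; i++, ccb_tmp++)
		ccb_tmp->cdb_phyaddr =
			(u32)((unsigned long)ccb_tmp - vir2phy_offset);

	return 0;
}
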
2029 dma_free_coherent(&acb->pdev->dev, acb->uncache_size, acb->dma_coherent, acb->dma_coherent_handle); in arcmsr_free_ccb_pool()
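
The single arcmsr_free_ccb_pool() match is the corresponding teardown: dma_free_coherent() must receive the same size, CPU pointer, and bus handle that the allocation returned. A sketch under the same assumptions as above:

#include <linux/dma-mapping.h>
#include <linux/pci.h>

/* Counterpart to the pool allocation; size and handles must match exactly. */
static void example_free_ccb_pool(struct pci_dev *pdev, size_t uncache_size,
				  void *dma_coherent,
				  dma_addr_t dma_coherent_handle)
{
	dma_free_coherent(&pdev->dev, uncache_size, dma_coherent,
			  dma_coherent_handle);
}
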