Searched refs:dma_sg (Results 1 – 4 of 4) sorted by relevance
  20    struct scatterlist *dma_sg = startsg;  /* pointer to current DMA */    in iommu_fill_pdir() local
  28    dma_sg--;                                                               in iommu_fill_pdir()
  47    BUG_ON(pdirp && (dma_len != sg_dma_len(dma_sg)));                       in iommu_fill_pdir()
  49    dma_sg++;                                                               in iommu_fill_pdir()
  57    sg_dma_address(dma_sg) = pide | ioc->ibase;                             in iommu_fill_pdir()
  62    sg_dma_address(dma_sg) = pide;                                          in iommu_fill_pdir()
  71    sg_dma_len(dma_sg) += startsg->length;                                  in iommu_fill_pdir()
1202    struct scatterlist *dma_sg = startsg;  /* pointer to current DMA */    in sba_fill_pdir() local
1229    dma_sg = sg_next(dma_sg);                                               in sba_fill_pdir()
1230    dma_sg->dma_address = pide | ioc->ibase;                                in sba_fill_pdir()
1245    dma_sg->dma_length += cnt;                                              in sba_fill_pdir()
1300    struct scatterlist *dma_sg;  /* next DMA stream head */                 in sba_coalesce_chunks() local
1312    dma_sg = vcontig_sg = startsg;                                          in sba_coalesce_chunks()
1405    dma_sg->dma_length = 0;                                                 in sba_coalesce_chunks()
1408    dma_sg->dma_address = (dma_addr_t)(PIDE_FLAG | (idx << iovp_shift)     in sba_coalesce_chunks()
  95    wa->dma_sg = sg;                                                        in ccp_init_sg_workarea()
 116    if (wa->sg_used == sg_dma_len(wa->dma_sg)) {                            in ccp_update_sg_workarea()
 118    wa->dma_sg = sg_next(wa->dma_sg);                                       in ccp_update_sg_workarea()
 318    nbytes = min(sg_dma_len(sg_wa->dma_sg) - sg_wa->sg_used,                in ccp_queue_buf()
 350    sg_src_len = sg_dma_len(src->sg_wa.dma_sg) - src->sg_wa.sg_used;        in ccp_prepare_data()
 354    sg_dst_len = sg_dma_len(dst->sg_wa.dma_sg) - dst->sg_wa.sg_used;        in ccp_prepare_data()
 384    op->src.u.dma.address = sg_dma_address(src->sg_wa.dma_sg);              in ccp_prepare_data()
 405    op->dst.u.dma.address = sg_dma_address(dst->sg_wa.dma_sg);              in ccp_prepare_data()
 471    struct scatterlist *dma_sg;                                             member
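
The common thread across these hits is a dma_sg cursor that walks a DMA-mapped scatterlist: it starts at the first segment, hands sg_dma_address()/sg_dma_len() of the current entry to the hardware, and advances with sg_next() once the entry is consumed (as in ccp_update_sg_workarea above). The sketch below is not taken from any of the files in these results; it is a minimal illustration of that consumer-side pattern, with my_workarea and my_consume as hypothetical names, while sg_next(), sg_dma_address() and sg_dma_len() are the standard scatterlist accessors.

    /*
     * Minimal sketch of a dma_sg cursor over a DMA-mapped scatterlist.
     * my_workarea / my_consume are hypothetical; only the scatterlist
     * accessors are real kernel APIs.
     */
    #include <linux/scatterlist.h>
    #include <linux/minmax.h>

    struct my_workarea {
            struct scatterlist *dma_sg;     /* current DMA segment */
            unsigned int sg_used;           /* bytes consumed from it */
    };

    static void my_consume(struct my_workarea *wa, unsigned int len)
    {
            while (len && wa->dma_sg) {
                    unsigned int avail = sg_dma_len(wa->dma_sg) - wa->sg_used;
                    unsigned int chunk = min(len, avail);
                    dma_addr_t addr = sg_dma_address(wa->dma_sg) + wa->sg_used;

                    /* (addr, chunk) would be programmed into the device here */
                    (void)addr;

                    wa->sg_used += chunk;
                    len -= chunk;

                    /* segment exhausted: advance the cursor to the next entry */
                    if (wa->sg_used == sg_dma_len(wa->dma_sg)) {
                            wa->dma_sg = sg_next(wa->dma_sg);
                            wa->sg_used = 0;
                    }
            }
    }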