Lines Matching refs:src_nents (each entry: source line number, matching line, enclosing function, and whether src_nents is an argument or a local variable there)

149 		       struct scatterlist *dst, int src_nents,  in caam_unmap()  argument
155 if (src_nents) in caam_unmap()
156 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
160 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
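
The three caam_unmap() matches above encode the direction dispatch: src is unmapped DMA_TO_DEVICE only for out-of-place requests, while an in-place request has a single DMA_BIDIRECTIONAL mapping shared by source and destination. A minimal sketch of that dispatch, assuming the upstream dst != src branching; unmap_req_sg() is a hypothetical name, and the DMA_FROM_DEVICE unmap of dst is inferred rather than shown in the matches:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/*
 * Dispatch implied by lines 149-160: for an out-of-place request the
 * source was mapped DMA_TO_DEVICE (destination DMA_FROM_DEVICE,
 * inferred); an in-place request has one bidirectional mapping.
 * A zero src_nents means the source was never mapped.
 */
static void unmap_req_sg(struct device *dev, struct scatterlist *src,
			 struct scatterlist *dst, int src_nents,
			 int dst_nents)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}
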
362 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
383 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
384 if (unlikely(src_nents < 0)) { in aead_edesc_alloc()
388 return ERR_PTR(src_nents); in aead_edesc_alloc()
399 if (src_nents) { in aead_edesc_alloc()
400 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in aead_edesc_alloc()
416 dma_unmap_sg(dev, req->src, src_nents, in aead_edesc_alloc()
428 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
429 if (unlikely(src_nents < 0)) { in aead_edesc_alloc()
433 return ERR_PTR(src_nents); in aead_edesc_alloc()
436 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in aead_edesc_alloc()
476 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, in aead_edesc_alloc()
491 caam_unmap(dev, req->src, req->dst, src_nents, in aead_edesc_alloc()
498 edesc->src_nents = src_nents; in aead_edesc_alloc()
515 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, in aead_edesc_alloc()
537 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, in aead_edesc_alloc()
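
The aead_edesc_alloc() matches show the recurring count/validate/map shape: sg_nents_for_len() sizes the S/G list for the bytes actually processed, a negative result aborts, dma_map_sg() produces a (possibly smaller) mapped count, and every later failure path hands the original src_nents back to caam_unmap()/dma_unmap_sg(). A condensed sketch of that shape, using a hypothetical map_src() helper; the error strings are placeholders:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/*
 * Condensation of lines 383-436: count the S/G entries covering
 * src_len, bail out on a malformed list, then DMA-map and keep both
 * counts. Hardware descriptors are built from the *mapped* count;
 * unmapping needs the original count.
 */
static int map_src(struct device *dev, struct scatterlist *src,
		   int src_len, enum dma_data_direction dir,
		   int *src_nents, int *mapped_src_nents)
{
	*src_nents = sg_nents_for_len(src, src_len);
	if (unlikely(*src_nents < 0)) {
		dev_err(dev, "Insufficient bytes (%d) in src S/G\n", src_len);
		return *src_nents;
	}

	if (*src_nents) {
		*mapped_src_nents = dma_map_sg(dev, src, *src_nents, dir);
		if (unlikely(!*mapped_src_nents)) {
			dev_err(dev, "unable to map source\n");
			return -ENOMEM;
		}
	} else {
		*mapped_src_nents = 0;
	}

	return 0;
}
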
1119 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in skcipher_edesc_alloc() local
1127 src_nents = sg_nents_for_len(req->src, req->cryptlen); in skcipher_edesc_alloc()
1128 if (unlikely(src_nents < 0)) { in skcipher_edesc_alloc()
1131 return ERR_PTR(src_nents); in skcipher_edesc_alloc()
1142 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in skcipher_edesc_alloc()
1153 dma_unmap_sg(dev, req->src, src_nents, DMA_TO_DEVICE); in skcipher_edesc_alloc()
1157 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in skcipher_edesc_alloc()
1186 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1195 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1208 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1214 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1232 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, in skcipher_edesc_alloc()
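
Lines 1142 and 1157 are the two arms of the out-of-place/in-place choice, and line 1153 is the rollback when mapping the destination fails after the source was already mapped. A sketch of that branch, assuming upstream's direction choices and a hypothetical map_skcipher_sg() helper:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/*
 * Branch behind lines 1142-1157: req->src is mapped DMA_TO_DEVICE
 * only when the request is out-of-place; in-place requests use one
 * DMA_BIDIRECTIONAL mapping shared by source and destination.
 */
static int map_skcipher_sg(struct device *dev, struct scatterlist *src,
			   struct scatterlist *dst, int src_nents,
			   int dst_nents, int *mapped_src_nents,
			   int *mapped_dst_nents)
{
	if (dst != src) {
		*mapped_src_nents = dma_map_sg(dev, src, src_nents,
					       DMA_TO_DEVICE);
		if (unlikely(!*mapped_src_nents))
			return -ENOMEM;

		*mapped_dst_nents = dma_map_sg(dev, dst, dst_nents,
					       DMA_FROM_DEVICE);
		if (unlikely(!*mapped_dst_nents)) {
			/* undo the source mapping before failing (line 1153) */
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
			return -ENOMEM;
		}
	} else {
		*mapped_src_nents = dma_map_sg(dev, src, src_nents,
					       DMA_BIDIRECTIONAL);
		if (unlikely(!*mapped_src_nents))
			return -ENOMEM;
		*mapped_dst_nents = *mapped_src_nents;
	}

	return 0;
}
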
1264 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
1276 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
1407 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_encrypt_done()
1445 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_decrypt_done()
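
Lines 1407 and 1445 pick the debug-dump length from src_nents: with a multi-entry source S/G list up to 100 bytes are dumped (100 is the upstream constant, not one chosen here), otherwise only ivsize bytes. A minimal sketch with a hypothetical dump_iv() wrapper:

#include <linux/printk.h>

/* Completion-path dump, per lines 1407/1445: dump extra context when
 * the source was scattered, else just the IV-sized prefix. */
static void dump_iv(const void *iv, int src_nents, int ivsize)
{
	print_hex_dump_debug("dstiv: ", DUMP_PREFIX_ADDRESS, 16, 4, iv,
			     src_nents > 1 ? 100 : ivsize, 1);
}
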
3361 if (edesc->src_nents) in ahash_unmap()
3362 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
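
ahash_unmap() (lines 3361-3362) undoes only the source mapping, since hashing has no destination S/G list; the count stored in edesc->src_nents at map time is reused here. A sketch assuming a stripped-down, hypothetical hash_edesc:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

struct hash_edesc {
	int src_nents;	/* count passed to dma_map_sg(); 0 = never mapped */
	/* ... descriptor state elided ... */
};

/* Hash input is read-only for the device, so the mapping is always
 * DMA_TO_DEVICE and there is no destination to undo. */
static void hash_unmap(struct device *dev, struct hash_edesc *edesc,
		       struct scatterlist *src)
{
	if (edesc->src_nents)
		dma_unmap_sg(dev, src, edesc->src_nents, DMA_TO_DEVICE);
}
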
3532 int src_nents, mapped_nents, qm_sg_bytes, qm_sg_src_index; in ahash_update_ctx() local
3543 src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_ctx()
3544 if (src_nents < 0) { in ahash_update_ctx()
3546 return src_nents; in ahash_update_ctx()
3549 if (src_nents) { in ahash_update_ctx()
3550 mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_update_ctx()
3563 dma_unmap_sg(ctx->dev, req->src, src_nents, in ahash_update_ctx()
3568 edesc->src_nents = src_nents; in ahash_update_ctx()
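
The ahash_update_ctx() cluster (lines 3543-3568) fixes the pattern that the five clusters below it, ahash_finup_ctx(), ahash_digest(), ahash_update_no_ctx(), ahash_finup_no_ctx(), and ahash_update_first(), repeat almost verbatim, so one sketch stands in for all of them. ahash_map_src() is a hypothetical helper distilled from those matches:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/*
 * Shared shape of the six ahash entry points: count the S/G entries
 * for the bytes actually consumed, map them DMA_TO_DEVICE, and hand
 * the pre-mapping count back so the caller can record it in
 * edesc->src_nents for the completion/error-path unmap.
 */
static int ahash_map_src(struct device *dev, struct scatterlist *src,
			 int src_len, int *mapped_nents)
{
	int src_nents = sg_nents_for_len(src, src_len);

	if (src_nents < 0) {
		dev_err(dev, "Invalid number of src SG.\n");
		return src_nents;
	}

	if (src_nents) {
		*mapped_nents = dma_map_sg(dev, src, src_nents,
					   DMA_TO_DEVICE);
		if (!*mapped_nents) {
			dev_err(dev, "unable to DMA map source\n");
			return -ENOMEM;
		}
	} else {
		*mapped_nents = 0;
	}

	return src_nents;	/* caller stores this in edesc->src_nents */
}
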
3720 int src_nents, mapped_nents; in ahash_finup_ctx() local
3726 src_nents = sg_nents_for_len(req->src, req->nbytes); in ahash_finup_ctx()
3727 if (src_nents < 0) { in ahash_finup_ctx()
3729 return src_nents; in ahash_finup_ctx()
3732 if (src_nents) { in ahash_finup_ctx()
3733 mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_finup_ctx()
3746 dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE); in ahash_finup_ctx()
3750 edesc->src_nents = src_nents; in ahash_finup_ctx()
3813 int src_nents, mapped_nents; in ahash_digest() local
3819 src_nents = sg_nents_for_len(req->src, req->nbytes); in ahash_digest()
3820 if (src_nents < 0) { in ahash_digest()
3822 return src_nents; in ahash_digest()
3825 if (src_nents) { in ahash_digest()
3826 mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_digest()
3839 dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE); in ahash_digest()
3843 edesc->src_nents = src_nents; in ahash_digest()
3984 int qm_sg_bytes, src_nents, mapped_nents; in ahash_update_no_ctx() local
3995 src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_no_ctx()
3996 if (src_nents < 0) { in ahash_update_no_ctx()
3998 return src_nents; in ahash_update_no_ctx()
4001 if (src_nents) { in ahash_update_no_ctx()
4002 mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_update_no_ctx()
4015 dma_unmap_sg(ctx->dev, req->src, src_nents, in ahash_update_no_ctx()
4020 edesc->src_nents = src_nents; in ahash_update_no_ctx()
4102 int qm_sg_bytes, src_nents, mapped_nents; in ahash_finup_no_ctx() local
4108 src_nents = sg_nents_for_len(req->src, req->nbytes); in ahash_finup_no_ctx()
4109 if (src_nents < 0) { in ahash_finup_no_ctx()
4111 return src_nents; in ahash_finup_no_ctx()
4114 if (src_nents) { in ahash_finup_no_ctx()
4115 mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_finup_no_ctx()
4128 dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE); in ahash_finup_no_ctx()
4132 edesc->src_nents = src_nents; in ahash_finup_no_ctx()
4201 int src_nents, mapped_nents; in ahash_update_first() local
4213 src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_first()
4214 if (src_nents < 0) { in ahash_update_first()
4216 return src_nents; in ahash_update_first()
4219 if (src_nents) { in ahash_update_first()
4220 mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_update_first()
4233 dma_unmap_sg(ctx->dev, req->src, src_nents, in ahash_update_first()
4238 edesc->src_nents = src_nents; in ahash_update_first()