Lines matching refs: src_nents
142 struct scatterlist *dst, int src_nents, in caam_unmap() argument
148 if (src_nents) in caam_unmap()
149 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
153 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
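The two dma_unmap_sg() directions above encode the driver's in-place/out-of-place split. A minimal sketch of that direction logic, with the parameter list abbreviated and the dst branch inferred by symmetry (the real caam_unmap() also releases the IV and qm_sg table mappings; the _sketch suffix is ours):

static void caam_unmap_sketch(struct device *dev, struct scatterlist *src,
                              struct scatterlist *dst, int src_nents,
                              int dst_nents)
{
        if (dst != src) {
                /* out-of-place: src was mapped TO_DEVICE, dst FROM_DEVICE */
                if (src_nents)
                        dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
                if (dst_nents)
                        dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
        } else {
                /* in-place: a single BIDIRECTIONAL mapping covers both */
                dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
        }
}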
360 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
381 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
382 if (unlikely(src_nents < 0)) { in aead_edesc_alloc()
386 return ERR_PTR(src_nents); in aead_edesc_alloc()
397 if (src_nents) { in aead_edesc_alloc()
398 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in aead_edesc_alloc()
414 dma_unmap_sg(dev, req->src, src_nents, in aead_edesc_alloc()
426 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
427 if (unlikely(src_nents < 0)) { in aead_edesc_alloc()
431 return ERR_PTR(src_nents); in aead_edesc_alloc()
434 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in aead_edesc_alloc()
474 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, in aead_edesc_alloc()
489 caam_unmap(dev, req->src, req->dst, src_nents, in aead_edesc_alloc()
496 edesc->src_nents = src_nents; in aead_edesc_alloc()
513 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, in aead_edesc_alloc()
535 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, in aead_edesc_alloc()
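Every edesc-allocation hit above follows the same count/validate/map shape. A minimal sketch of that pattern as a standalone helper (kernel context, linux/scatterlist.h and linux/dma-mapping.h; the helper name and comments are ours, not the driver's):

/*
 * Count the S/G entries covering src_len bytes, map them, and return
 * the mapped count (0 for an empty source) or a negative errno. The
 * caller must keep *src_nents for the eventual dma_unmap_sg().
 */
static int caam_map_src_sketch(struct device *dev, struct scatterlist *src,
                               int src_len, enum dma_data_direction dir,
                               int *src_nents)
{
        int mapped;

        *src_nents = sg_nents_for_len(src, src_len);
        if (unlikely(*src_nents < 0))
                return *src_nents;      /* S/G list shorter than src_len */

        if (!*src_nents)
                return 0;               /* zero-length source: nothing to map */

        mapped = dma_map_sg(dev, src, *src_nents, dir);
        if (unlikely(!mapped))
                return -ENOMEM;         /* DMA mapping failed */

        return mapped;
}

Once mapping has succeeded, any later allocation failure unwinds through caam_unmap(), which is why the hits at lines 474-535 all sit on error paths.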
1126 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in skcipher_edesc_alloc() local
1134 src_nents = sg_nents_for_len(req->src, req->cryptlen); in skcipher_edesc_alloc()
1135 if (unlikely(src_nents < 0)) { in skcipher_edesc_alloc()
1138 return ERR_PTR(src_nents); in skcipher_edesc_alloc()
1149 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in skcipher_edesc_alloc()
1160 dma_unmap_sg(dev, req->src, src_nents, DMA_TO_DEVICE); in skcipher_edesc_alloc()
1164 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in skcipher_edesc_alloc()
1193 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1202 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1215 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1221 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1239 caam_unmap(dev, req->src, req->dst, src_nents, dst_nents, in skcipher_edesc_alloc()
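Lines 1149-1164 are the map-time counterpart of caam_unmap()'s direction split: an out-of-place request maps src and dst separately and must unwind the src mapping if the dst mapping fails, while an in-place request takes a single bidirectional mapping. A structural sketch (error messages and the qm_sg sizing that follows are omitted):

if (req->src != req->dst) {
        mapped_src_nents = dma_map_sg(dev, req->src, src_nents,
                                      DMA_TO_DEVICE);
        if (unlikely(!mapped_src_nents))
                return ERR_PTR(-ENOMEM);

        mapped_dst_nents = dma_map_sg(dev, req->dst, dst_nents,
                                      DMA_FROM_DEVICE);
        if (unlikely(!mapped_dst_nents)) {
                /* undo src with the same direction it was mapped with */
                dma_unmap_sg(dev, req->src, src_nents, DMA_TO_DEVICE);
                return ERR_PTR(-ENOMEM);
        }
} else {
        mapped_src_nents = dma_map_sg(dev, req->src, src_nents,
                                      DMA_BIDIRECTIONAL);
        if (unlikely(!mapped_src_nents))
                return ERR_PTR(-ENOMEM);
}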
1271 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
1283 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
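aead_unmap() and skcipher_unmap() show why src_nents is stored in the edesc at lines 496 and 1221: per the DMA API, dma_unmap_sg() must be given the nents value originally passed to dma_map_sg(), not the possibly smaller count it returned, so the descriptor has to carry the original counts to the completion path. A hypothetical, trimmed-down layout:

struct edesc_sketch {
        int src_nents;  /* nents passed to dma_map_sg() for req->src */
        int dst_nents;  /* nents passed to dma_map_sg() for req->dst */
        /*
         * The real aead_edesc/skcipher_edesc/ahash_edesc structs also
         * carry the IV DMA handle, qm_sg table and hw descriptor.
         */
};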
1414 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_encrypt_done()
1452 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_decrypt_done()
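The ternary at lines 1414 and 1452 merely sizes a debug hex dump: roughly 100 bytes when the source spanned several S/G entries, otherwise just the IV. Reconstructed context, abbreviated from the completion callbacks (treat the exact prefix string as illustrative):

print_hex_dump_debug("dstiv  @" __stringify(__LINE__)": ",
                     DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
                     edesc->src_nents > 1 ? 100 : ivsize, 1);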
3316 if (edesc->src_nents) in ahash_unmap()
3317 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
3472 int src_nents, mapped_nents, qm_sg_bytes, qm_sg_src_index; in ahash_update_ctx() local
3484 src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_ctx()
3485 if (src_nents < 0) { in ahash_update_ctx()
3487 return src_nents; in ahash_update_ctx()
3490 if (src_nents) { in ahash_update_ctx()
3491 mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_update_ctx()
3504 dma_unmap_sg(ctx->dev, req->src, src_nents, in ahash_update_ctx()
3509 edesc->src_nents = src_nents; in ahash_update_ctx()
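The six ahash hits (ahash_update_ctx above plus the five variants that follow) all repeat one fragment; only src_len varies, being req->nbytes on the digest/finup paths and the to-hash remainder on the update paths. A minimal sketch of that recurring fragment, with illustrative error strings:

src_nents = sg_nents_for_len(req->src, src_len);
if (src_nents < 0) {
        dev_err(ctx->dev, "Invalid number of src SG.\n");
        return src_nents;
}

if (src_nents) {
        mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
                                  DMA_TO_DEVICE);
        if (!mapped_nents) {
                dev_err(ctx->dev, "unable to DMA map source\n");
                return -ENOMEM;
        }
} else {
        mapped_nents = 0;
}

/* ahash_unmap() (lines 3316-3317) later releases exactly this count */
edesc->src_nents = src_nents;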
3668 int src_nents, mapped_nents; in ahash_finup_ctx() local
3674 src_nents = sg_nents_for_len(req->src, req->nbytes); in ahash_finup_ctx()
3675 if (src_nents < 0) { in ahash_finup_ctx()
3677 return src_nents; in ahash_finup_ctx()
3680 if (src_nents) { in ahash_finup_ctx()
3681 mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_finup_ctx()
3694 dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE); in ahash_finup_ctx()
3698 edesc->src_nents = src_nents; in ahash_finup_ctx()
3761 int src_nents, mapped_nents; in ahash_digest() local
3767 src_nents = sg_nents_for_len(req->src, req->nbytes); in ahash_digest()
3768 if (src_nents < 0) { in ahash_digest()
3770 return src_nents; in ahash_digest()
3773 if (src_nents) { in ahash_digest()
3774 mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_digest()
3787 dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE); in ahash_digest()
3791 edesc->src_nents = src_nents; in ahash_digest()
3933 int qm_sg_bytes, src_nents, mapped_nents; in ahash_update_no_ctx() local
3944 src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_no_ctx()
3945 if (src_nents < 0) { in ahash_update_no_ctx()
3947 return src_nents; in ahash_update_no_ctx()
3950 if (src_nents) { in ahash_update_no_ctx()
3951 mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_update_no_ctx()
3964 dma_unmap_sg(ctx->dev, req->src, src_nents, in ahash_update_no_ctx()
3969 edesc->src_nents = src_nents; in ahash_update_no_ctx()
4059 int qm_sg_bytes, src_nents, mapped_nents; in ahash_finup_no_ctx() local
4065 src_nents = sg_nents_for_len(req->src, req->nbytes); in ahash_finup_no_ctx()
4066 if (src_nents < 0) { in ahash_finup_no_ctx()
4068 return src_nents; in ahash_finup_no_ctx()
4071 if (src_nents) { in ahash_finup_no_ctx()
4072 mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_finup_no_ctx()
4085 dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE); in ahash_finup_no_ctx()
4089 edesc->src_nents = src_nents; in ahash_finup_no_ctx()
4157 int src_nents, mapped_nents; in ahash_update_first() local
4169 src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_first()
4170 if (src_nents < 0) { in ahash_update_first()
4172 return src_nents; in ahash_update_first()
4175 if (src_nents) { in ahash_update_first()
4176 mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents, in ahash_update_first()
4189 dma_unmap_sg(ctx->dev, req->src, src_nents, in ahash_update_first()
4194 edesc->src_nents = src_nents; in ahash_update_first()