
Searched refs: src_nents (Results 1 – 25 of 25), sorted by relevance
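
Across every driver in these hits, src_nents follows the same scatterlist life cycle: count the entries covering the request with sg_nents_for_len() (or sg_nents()), hand that count to dma_map_sg(), and pass the identical count back to dma_unmap_sg() at tear-down. A minimal sketch of that cycle, with dev and src as stand-ins for each driver's own device and request fields:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Minimal count/map/unmap sketch of the pattern the hits below share.
 * 'dev' and 'src' stand in for each driver's device and request SG list. */
static int map_request_src(struct device *dev, struct scatterlist *src,
			   unsigned int src_len)
{
	int src_nents, mapped_nents;

	src_nents = sg_nents_for_len(src, src_len); /* -EINVAL if list too short */
	if (src_nents < 0)
		return src_nents;

	/* dma_map_sg() may coalesce entries: it returns the mapped count
	 * (0 on failure), which can be smaller than src_nents. */
	mapped_nents = dma_map_sg(dev, src, src_nents, DMA_TO_DEVICE);
	if (!mapped_nents)
		return -ENOMEM;

	/* Tear-down must use the original src_nents, not mapped_nents:
	 *   dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
	 */
	return mapped_nents;
}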

/Linux-v5.4/drivers/crypto/caam/
caamhash.c 561 int src_nents; member
573 if (edesc->src_nents) in ahash_unmap()
574 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
796 int src_nents, mapped_nents, sec4_sg_bytes, sec4_sg_src_index; in ahash_update_ctx() local
819 src_nents = sg_nents_for_len(req->src, src_len); in ahash_update_ctx()
820 if (src_nents < 0) { in ahash_update_ctx()
822 return src_nents; in ahash_update_ctx()
825 if (src_nents) { in ahash_update_ctx()
826 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_update_ctx()
847 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_update_ctx()
[all …]
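
Note the guard visible in ahash_unmap() and ahash_update_ctx() above: an update with no fresh source bytes records src_nents == 0 and must skip both the map and the unmap. Sketched (function name is illustrative):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Zero-count guard sketch: src_nents == 0 means nothing was mapped,
 * so dma_unmap_sg() must not be called at all. */
static void unmap_hash_src(struct device *dev, struct scatterlist *src,
			   int src_nents)
{
	if (src_nents)
		dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
}
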
caamalg_qi2.c 142 struct scatterlist *dst, int src_nents, in caam_unmap() argument
148 if (src_nents) in caam_unmap()
149 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
153 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
360 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
381 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
382 if (unlikely(src_nents < 0)) { in aead_edesc_alloc()
386 return ERR_PTR(src_nents); in aead_edesc_alloc()
397 if (src_nents) { in aead_edesc_alloc()
398 mapped_src_nents = dma_map_sg(dev, req->src, src_nents, in aead_edesc_alloc()
[all …]
caamalg_qi.c 800 int src_nents; member
822 int src_nents; member
870 struct scatterlist *dst, int src_nents, in caam_unmap() argument
876 if (src_nents) in caam_unmap()
877 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
881 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
897 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
909 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
948 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
974 src_nents = sg_nents_for_len(req->src, src_len); in aead_edesc_alloc()
[all …]
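
The caam_unmap() fragments in all three CAAM files hint at the in-place special case: an in-place request (dst == src) was mapped once as DMA_BIDIRECTIONAL, otherwise source and destination were mapped separately. A hedged reconstruction of that branch (the real function also unmaps the IV and the SEC4 SG table, omitted here):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Direction-selection sketch of the caam_unmap() logic in the hits above. */
static void unmap_req(struct device *dev, struct scatterlist *src,
		      struct scatterlist *dst, int src_nents, int dst_nents)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		/* In-place request: one mapping, both directions. */
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}
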
caamalg_qi2.h 110 int src_nents; member
130 int src_nents; member
147 int src_nents; member
caampkc.c 49 dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE); in rsa_io_unmap()
257 int src_nents, dst_nents; in rsa_edesc_alloc() local
284 src_nents = sg_nents_for_len(req_ctx->fixup_src, in rsa_edesc_alloc()
288 if (!diff_size && src_nents == 1) in rsa_edesc_alloc()
291 sec4_sg_len = src_nents + !!diff_size; in rsa_edesc_alloc()
306 sgc = dma_map_sg(dev, req_ctx->fixup_src, src_nents, DMA_TO_DEVICE); in rsa_edesc_alloc()
332 edesc->src_nents = src_nents; in rsa_edesc_alloc()
356 dma_unmap_sg(dev, req_ctx->fixup_src, src_nents, DMA_TO_DEVICE); in rsa_edesc_alloc()
386 if (edesc->src_nents > 1) { in set_rsa_pub_pdb()
389 sec4_sg_index += edesc->src_nents; in set_rsa_pub_pdb()
[all …]
caamalg.c 885 int src_nents; member
909 int src_nents; member
921 struct scatterlist *dst, int src_nents, in caam_unmap() argument
927 if (src_nents) in caam_unmap()
928 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
932 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
947 edesc->src_nents, edesc->dst_nents, 0, 0, in aead_unmap()
958 edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
1033 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_encrypt_done()
1282 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen); in init_skcipher_job()
[all …]
caampkc.h 124 int src_nents; member
/Linux-v5.4/drivers/crypto/qce/
ablkcipher.c 39 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_ablkcipher_done()
73 rctx->src_nents = sg_nents_for_len(req->src, req->nbytes); in qce_ablkcipher_async_req_handle()
77 rctx->dst_nents = rctx->src_nents; in qce_ablkcipher_async_req_handle()
78 if (rctx->src_nents < 0) { in qce_ablkcipher_async_req_handle()
80 return rctx->src_nents; in qce_ablkcipher_async_req_handle()
118 ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src); in qce_ablkcipher_async_req_handle()
126 ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, rctx->src_nents, in qce_ablkcipher_async_req_handle()
144 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, dir_src); in qce_ablkcipher_async_req_handle()
sha.c 46 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_done()
86 rctx->src_nents = sg_nents_for_len(req->src, req->nbytes); in qce_ahash_async_req_handle()
87 if (rctx->src_nents < 0) { in qce_ahash_async_req_handle()
89 return rctx->src_nents; in qce_ahash_async_req_handle()
92 ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
102 ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents, in qce_ahash_async_req_handle()
120 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
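Both qce paths above count once, reuse the count for dma_map_sg() and qce_dma_prep_sgs(), and pass the same count to dma_unmap_sg() on every exit path. A life-cycle sketch of the hash side; 'submit_to_engine' is a hypothetical stand-in for the driver's own qce_dma_prep_sgs() helper:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Count, map, submit, and unwind with the SAME count on failure,
 * mirroring qce_ahash_async_req_handle() above. */
static int hash_map_and_submit(struct device *dev,
			       struct scatterlist *src, unsigned int len,
			       int (*submit_to_engine)(struct scatterlist *, int))
{
	int nents, ret;

	nents = sg_nents_for_len(src, len);
	if (nents < 0)
		return nents;

	ret = dma_map_sg(dev, src, nents, DMA_TO_DEVICE);
	if (!ret)
		return -EIO;

	ret = submit_to_engine(src, nents);
	if (ret)
		dma_unmap_sg(dev, src, nents, DMA_TO_DEVICE);

	return ret;
}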
cipher.h 38 int src_nents; member
sha.h 49 int src_nents; member
/Linux-v5.4/drivers/crypto/marvell/
cipher.c 64 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
67 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
92 len = sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_skcipher_std_step()
317 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
329 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
388 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
421 creq->src_nents = sg_nents_for_len(req->src, req->cryptlen); in mv_cesa_skcipher_req_init()
422 if (creq->src_nents < 0) { in mv_cesa_skcipher_req_init()
424 return creq->src_nents; in mv_cesa_skcipher_req_init()
hash.c 102 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_cleanup()
187 sreq->offset += sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_ahash_std_step()
395 sg_pcopy_to_buffer(ahashreq->src, creq->src_nents, in mv_cesa_ahash_req_cleanup()
448 sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_ahash_cache_req()
613 if (creq->src_nents) { in mv_cesa_ahash_dma_req_init()
614 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_ahash_dma_req_init()
719 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_req_init()
731 creq->src_nents = sg_nents_for_len(req->src, req->nbytes); in mv_cesa_ahash_req_init()
732 if (creq->src_nents < 0) { in mv_cesa_ahash_req_init()
734 return creq->src_nents; in mv_cesa_ahash_req_init()
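The Marvell CESA hits show the non-DMA fallback: when the engine processes data through internal SRAM, the driver copies bytes out of the scatterlist with sg_pcopy_to_buffer() instead of mapping it. A small sketch of that call; buffer and offset names are illustrative:

#include <linux/scatterlist.h>

/* Bounce-copy sketch: pull 'len' bytes starting at 'offset' out of
 * a scatterlist into a driver buffer, as the std_step paths above do.
 * Returns the number of bytes actually copied. */
static size_t pull_bytes(struct scatterlist *src, unsigned int src_nents,
			 void *buf, size_t len, off_t offset)
{
	return sg_pcopy_to_buffer(src, src_nents, buf, len, offset);
}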
cesa.h 564 int src_nents; member
610 int src_nents; member
/Linux-v5.4/drivers/crypto/
talitos.c 971 unsigned int src_nents = edesc->src_nents ? : 1; in talitos_sg_unmap() local
981 if (src_nents == 1 || !is_sec1) in talitos_sg_unmap()
982 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in talitos_sg_unmap()
986 } else if (src_nents == 1 || !is_sec1) { in talitos_sg_unmap()
987 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in talitos_sg_unmap()
1222 sg_count = edesc->src_nents ?: 1; in ipsec_esp()
1334 int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len; in talitos_edesc_alloc() local
1349 src_nents = sg_nents_for_len(src, src_len); in talitos_edesc_alloc()
1350 if (src_nents < 0) { in talitos_edesc_alloc()
1354 src_nents = (src_nents == 1) ? 0 : src_nents; in talitos_edesc_alloc()
[all …]
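
talitos has its own convention, visible at lines 971 and 1354 above: edesc->src_nents stores 0 for the single-entry case (the descriptor then addresses the buffer directly), and users restore the effective count with the GNU '?:' shorthand. A minimal illustration:

/* talitos convention sketch: 0 encodes "single SG entry, addressed
 * directly"; the effective count is recovered before unmapping. */
static int talitos_style_nents(int counted)
{
	int stored = (counted == 1) ? 0 : counted; /* what the edesc keeps  */

	return stored ?: 1;                        /* what unmap then uses */
}
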
picoxcell_crypto.c 317 int src_nents, dst_nents; in spacc_aead_make_ddts() local
325 src_nents = sg_nents_for_len(areq->src, total); in spacc_aead_make_ddts()
326 if (src_nents < 0) { in spacc_aead_make_ddts()
328 return src_nents; in spacc_aead_make_ddts()
330 if (src_nents + 1 > MAX_DDT_LEN) in spacc_aead_make_ddts()
340 if (src_nents + 1 > MAX_DDT_LEN) in spacc_aead_make_ddts()
356 src_ents = dma_map_sg(engine->dev, areq->src, src_nents, in spacc_aead_make_ddts()
365 dma_unmap_sg(engine->dev, areq->src, src_nents, in spacc_aead_make_ddts()
370 src_ents = dma_map_sg(engine->dev, areq->src, src_nents, in spacc_aead_make_ddts()
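picoxcell bounds the entry count against its hardware DDT (DMA descriptor table) before mapping; one slot is reserved, hence the 'src_nents + 1' test above. A sketch of that check, with MAX_DDT_LEN and the errno chosen here for illustration:

#include <linux/errno.h>

#define MAX_DDT_LEN 64 /* illustrative; the driver defines its own value */

/* Up-front fit check sketch: reject requests whose SG list cannot
 * fit in the fixed-size descriptor table. */
static int check_ddt_fit(int src_nents)
{
	if (src_nents + 1 > MAX_DDT_LEN)
		return -E2BIG;
	return 0;
}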
talitos.h 61 int src_nents; member
/Linux-v5.4/drivers/crypto/virtio/
virtio_crypto_algs.c 354 int src_nents, dst_nents; in __virtio_crypto_ablkcipher_do_req() local
364 src_nents = sg_nents_for_len(req->src, req->nbytes); in __virtio_crypto_ablkcipher_do_req()
368 src_nents, dst_nents); in __virtio_crypto_ablkcipher_do_req()
371 sg_total = src_nents + dst_nents + 3; in __virtio_crypto_ablkcipher_do_req()
445 for (i = 0; i < src_nents; i++) in __virtio_crypto_ablkcipher_do_req()
/Linux-v5.4/drivers/crypto/ccp/
ccp-dmaengine.c 356 unsigned int src_nents, in ccp_create_desc() argument
375 if (!dst_nents || !src_nents) in ccp_create_desc()
392 src_nents--; in ccp_create_desc()
393 if (!src_nents) in ccp_create_desc()
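ccp_create_desc() consumes the count as it builds descriptors: each step drains one source entry and decrements src_nents until it reaches zero, as the two hits above show. A stripped-down sketch of that walk (descriptor construction elided):

#include <linux/scatterlist.h>

/* Consumption-loop sketch: advance one SG entry per step and stop
 * when the remaining count hits zero. */
static void walk_src(struct scatterlist *sg, unsigned int src_nents)
{
	while (sg && src_nents) {
		/* ... build one descriptor for 'sg' here ... */
		sg = sg_next(sg);
		src_nents--;
	}
}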
/Linux-v5.4/drivers/crypto/rockchip/
rk3288_crypto.h 210 size_t src_nents; member
rk3288_crypto_ablkcipher.c 260 sg_pcopy_to_buffer(dev->first, dev->src_nents, req->info, in rk_set_data_start()
281 dev->src_nents = sg_nents(req->src); in rk_ablk_start()
rk3288_crypto.c 119 if (!sg_pcopy_to_buffer(dev->first, dev->src_nents, in rk_load_data()
rk3288_crypto_ahash.c 206 dev->src_nents = sg_nents(req->src); in rk_ahash_start()
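Unlike most drivers above, rockchip counts with sg_nents(), which walks the whole chain rather than just the entries covering the request length. The difference in a sketch:

#include <linux/scatterlist.h>

/* sg_nents() counts every entry in the chain; sg_nents_for_len()
 * counts only the entries needed to cover 'len' bytes and returns
 * -EINVAL if the chain is shorter than that. */
static int count_entries(struct scatterlist *sg, u64 len, bool whole_chain)
{
	return whole_chain ? sg_nents(sg) : sg_nents_for_len(sg, len);
}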
/Linux-v5.4/drivers/crypto/bcm/
cipher.c 229 rctx->src_nents, chunksize); in spu_ablkcipher_tx_sg_create()
352 rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize); in handle_ablkcipher_req()
477 tx_frag_num += rctx->src_nents; in handle_ablkcipher_req()
643 rctx->src_nents, new_data_len); in spu_ahash_tx_sg_create()
805 rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, in handle_ahash_req()
908 tx_frag_num += rctx->src_nents; in handle_ahash_req()
1255 rctx->src_nents, datalen); in spu_aead_tx_sg_create()
1381 rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize); in handle_aead_req()
1540 tx_frag_num += rctx->src_nents; in handle_aead_req()
1772 rctx->src_nents = 0; in ablkcipher_enqueue()
[all …]
cipher.h 284 int src_nents; /* Number of src entries with data */ member
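
The Broadcom driver counts with its own spu_sg_count() helper, which per the hits above tallies the entries carrying data in a byte window (a skip offset plus a chunk length). A plausible shape for such a counter, written from scratch here rather than copied from the driver:

#include <linux/kernel.h>
#include <linux/scatterlist.h>

/* Hypothetical windowed counter in the spirit of bcm's spu_sg_count():
 * how many entries carry data in the [skip, skip + len) byte range? */
static int count_window(struct scatterlist *sg, unsigned int skip,
			unsigned int len)
{
	int nents = 0;

	/* Skip whole entries that lie before the window. */
	while (sg && skip >= sg->length) {
		skip -= sg->length;
		sg = sg_next(sg);
	}
	/* Count entries until 'len' bytes are covered. */
	while (sg && len) {
		unsigned int n = min_t(unsigned int, len, sg->length - skip);

		len -= n;
		skip = 0;
		nents++;
		sg = sg_next(sg);
	}
	return nents;
}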