
Searched refs:src_nents (Results 1 – 21 of 21) sorted by relevance

/Linux-v4.19/drivers/crypto/caam/
caamhash.c
498 int src_nents; member
510 if (edesc->src_nents) in ahash_unmap()
511 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
753 int src_nents, mapped_nents, sec4_sg_bytes, sec4_sg_src_index; in ahash_update_ctx() local
762 src_nents = sg_nents_for_len(req->src, in ahash_update_ctx()
764 if (src_nents < 0) { in ahash_update_ctx()
766 return src_nents; in ahash_update_ctx()
769 if (src_nents) { in ahash_update_ctx()
770 mapped_nents = dma_map_sg(jrdev, req->src, src_nents, in ahash_update_ctx()
792 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in ahash_update_ctx()
[all …]
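The caamhash.c hits above follow the pattern most drivers in this list share: count the scatterlist entries that cover the request with sg_nents_for_len(), bail out on its negative error code, then hand that count to dma_map_sg() and keep it for the matching dma_unmap_sg(). A minimal sketch of that sequence; jrdev matches the name in the hits, while to_hash and the function name are placeholders, and descriptor construction is elided:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>
#include <crypto/hash.h>

/* Sketch only: count and map the source scatterlist, as in ahash_update_ctx(). */
static int example_map_src(struct device *jrdev, struct ahash_request *req,
			   unsigned int to_hash, int *mapped_nents)
{
	int src_nents;

	/* How many S/G entries hold the first 'to_hash' bytes? */
	src_nents = sg_nents_for_len(req->src, to_hash);
	if (src_nents < 0)
		return src_nents;	/* list shorter than 'to_hash' */

	if (src_nents) {
		*mapped_nents = dma_map_sg(jrdev, req->src, src_nents,
					   DMA_TO_DEVICE);
		if (!*mapped_nents)
			return -ENOMEM;
	} else {
		*mapped_nents = 0;	/* nothing to pull from req->src */
	}

	/* The caller keeps src_nents for the matching
	 * dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE)
	 * on completion or on any later failure.
	 */
	return src_nents;
}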
caamalg_qi.c
732 int src_nents; member
754 int src_nents; member
804 struct scatterlist *dst, int src_nents, in caam_unmap() argument
810 if (src_nents) in caam_unmap()
811 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
814 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
832 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
845 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in ablkcipher_unmap()
896 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; in aead_edesc_alloc() local
919 src_nents = sg_nents_for_len(req->src, req->assoclen + in aead_edesc_alloc()
[all …]
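The caam_unmap() fragments above (lines 804-814) show the in-place/out-of-place split that several drivers in this list repeat: when the destination is a different scatterlist, the source was mapped DMA_TO_DEVICE and the destination DMA_FROM_DEVICE; when the cipher runs in place, one bidirectional mapping covers both. A hedged reconstruction of just that branch, with the real function's extra arguments (IV, S/G table, op type) omitted:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Sketch of the unmap split visible in caam_unmap(). */
static void example_caam_unmap(struct device *dev, struct scatterlist *src,
			       struct scatterlist *dst, int src_nents,
			       int dst_nents)
{
	if (dst != src) {
		/* Out-of-place: src was mapped for reads, dst for writes. */
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		/* In-place: one bidirectional mapping covers both. */
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}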
caamalg.c
759 int src_nents; member
780 int src_nents; member
791 struct scatterlist *dst, int src_nents, in caam_unmap() argument
798 if (src_nents) in caam_unmap()
799 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in caam_unmap()
802 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in caam_unmap()
817 edesc->src_nents, edesc->dst_nents, 0, 0, DMA_NONE, in aead_unmap()
829 edesc->src_nents, edesc->dst_nents, in ablkcipher_unmap()
904 edesc->src_nents > 1 ? 100 : ivsize, 1); in ablkcipher_encrypt_done()
991 src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0; in init_aead_job()
[all …]
caampkc.c
30 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in rsa_io_unmap()
225 int src_nents, dst_nents; in rsa_edesc_alloc() local
235 src_nents = sg_nents_for_len(req->src, req->src_len); in rsa_edesc_alloc()
238 if (src_nents > 1) in rsa_edesc_alloc()
239 sec4_sg_len = src_nents; in rsa_edesc_alloc()
251 sgc = dma_map_sg(dev, req->src, src_nents, DMA_TO_DEVICE); in rsa_edesc_alloc()
266 if (src_nents > 1) { in rsa_edesc_alloc()
267 sg_to_sec4_sg_last(req->src, src_nents, edesc->sec4_sg, 0); in rsa_edesc_alloc()
268 sec4_sg_index += src_nents; in rsa_edesc_alloc()
275 edesc->src_nents = src_nents; in rsa_edesc_alloc()
[all …]
caampkc.h
117 int src_nents; member
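rsa_edesc_alloc() (lines 225-275) shows why the count matters beyond mapping: with more than one entry the driver must build a hardware S/G table (sg_to_sec4_sg_last()), while a single mapped entry can be fed to the engine directly via sg_dma_address(), as the caamalg.c hit at line 991 also does. A sketch of that decision; the names and the table-building step are reduced to comments and the placeholder value is labeled as such:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Sketch: choose between a direct DMA address and a hardware S/G table. */
static int example_prepare_src(struct device *dev, struct scatterlist *src,
			       unsigned int src_len, dma_addr_t *src_dma)
{
	int src_nents;

	src_nents = sg_nents_for_len(src, src_len);
	if (src_nents < 0)
		return src_nents;

	if (!dma_map_sg(dev, src, src_nents, DMA_TO_DEVICE))
		return -ENOMEM;

	if (src_nents > 1) {
		/* Several entries: the engine needs its own S/G table
		 * (sg_to_sec4_sg_last(src, src_nents, edesc->sec4_sg, 0) in
		 * caampkc.c); the descriptor then points at that table, so
		 * 0 here is only a placeholder.
		 */
		*src_dma = 0;
	} else {
		/* One entry: its bus address goes into the descriptor
		 * directly, as in the caamalg.c hit at line 991.
		 */
		*src_dma = sg_dma_address(src);
	}

	return src_nents;
}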
/Linux-v4.19/drivers/crypto/qce/
ablkcipher.c
47 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src); in qce_ablkcipher_done()
81 rctx->src_nents = sg_nents_for_len(req->src, req->nbytes); in qce_ablkcipher_async_req_handle()
85 rctx->dst_nents = rctx->src_nents; in qce_ablkcipher_async_req_handle()
86 if (rctx->src_nents < 0) { in qce_ablkcipher_async_req_handle()
88 return rctx->src_nents; in qce_ablkcipher_async_req_handle()
126 ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src); in qce_ablkcipher_async_req_handle()
134 ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, rctx->src_nents, in qce_ablkcipher_async_req_handle()
152 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, dir_src); in qce_ablkcipher_async_req_handle()
sha.c
54 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_done()
94 rctx->src_nents = sg_nents_for_len(req->src, req->nbytes); in qce_ahash_async_req_handle()
95 if (rctx->src_nents < 0) { in qce_ahash_async_req_handle()
97 return rctx->src_nents; in qce_ahash_async_req_handle()
100 ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
110 ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents, in qce_ahash_async_req_handle()
128 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE); in qce_ahash_async_req_handle()
cipher.h
46 int src_nents; member
sha.h
57 int src_nents; member
/Linux-v4.19/drivers/crypto/marvell/
cipher.c
67 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
70 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
95 len = sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_skcipher_std_step()
331 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
343 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
403 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_req_init()
436 creq->src_nents = sg_nents_for_len(req->src, req->cryptlen); in mv_cesa_skcipher_req_init()
437 if (creq->src_nents < 0) { in mv_cesa_skcipher_req_init()
439 return creq->src_nents; in mv_cesa_skcipher_req_init()
hash.c
105 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_cleanup()
191 sreq->offset += sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_ahash_std_step()
399 sg_pcopy_to_buffer(ahashreq->src, creq->src_nents, in mv_cesa_ahash_req_cleanup()
452 sg_pcopy_to_buffer(req->src, creq->src_nents, in mv_cesa_ahash_cache_req()
617 if (creq->src_nents) { in mv_cesa_ahash_dma_req_init()
618 ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_ahash_dma_req_init()
723 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_req_init()
735 creq->src_nents = sg_nents_for_len(req->src, req->nbytes); in mv_cesa_ahash_req_init()
736 if (creq->src_nents < 0) { in mv_cesa_ahash_req_init()
738 return creq->src_nents; in mv_cesa_ahash_req_init()
cesa.h
564 int src_nents; member
610 int src_nents; member
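Besides the DMA path, the Marvell hits (mv_cesa_*_std_step(), mv_cesa_ahash_cache_req()) and mxc_scc_put_data() further down use src_nents with sg_pcopy_to_buffer(): instead of mapping, data is copied out of the scatterlist into an engine or cache buffer in chunks. A sketch of that copy loop, with the buffer and chunk size left generic and the register programming elided:

#include <linux/scatterlist.h>

/* Sketch of the copy-based (non-DMA) path: feed the request to the engine
 * block by block, tracking how much of the scatterlist was consumed.
 */
static void example_feed_blocks(struct scatterlist *src, int src_nents,
				size_t total, void *block, size_t blocksize)
{
	size_t offset = 0;

	while (offset < total) {
		size_t len = sg_pcopy_to_buffer(src, src_nents, block,
						blocksize, offset);
		if (!len)
			break;		/* scatterlist exhausted early */
		/* ... program 'block' into the engine's data buffer ... */
		offset += len;
	}
}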
/Linux-v4.19/drivers/crypto/
talitos.c
933 int src_nents; member
954 unsigned int src_nents = edesc->src_nents ? : 1; in talitos_sg_unmap() local
964 if (src_nents == 1 || !is_sec1) in talitos_sg_unmap()
965 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); in talitos_sg_unmap()
969 } else if (src_nents == 1 || !is_sec1) { in talitos_sg_unmap()
970 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); in talitos_sg_unmap()
1030 icvdata = &edesc->link_tbl[edesc->src_nents + in ipsec_esp_encrypt_done()
1072 &edesc->link_tbl[edesc->src_nents + in ipsec_esp_decrypt_swauth_done()
1223 sg_count = edesc->src_nents ?: 1; in ipsec_esp()
1286 int offset = (edesc->src_nents + edesc->dst_nents + 2) * in ipsec_esp()
[all …]
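talitos_sg_unmap() at line 954 uses the GCC "?:" shorthand: edesc->src_nents ? : 1 reads the stored count but substitutes 1 when it is 0, so a stored zero encodes "single segment, no link table needed". The same function repeats the in-place/out-of-place direction split shown earlier. A compressed sketch that keeps only those two points (the real function also special-cases SEC1 hardware):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Sketch of the talitos_sg_unmap() pattern: a stored count of 0 stands for
 * one segment and is recovered with '?:' at unmap time.
 */
static void example_sg_unmap(struct device *dev, struct scatterlist *src,
			     struct scatterlist *dst, int stored_src_nents,
			     int stored_dst_nents)
{
	unsigned int src_nents = stored_src_nents ? : 1;
	unsigned int dst_nents = stored_dst_nents ? : 1;

	if (src != dst) {
		dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}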
picoxcell_crypto.c
330 int src_nents, dst_nents; in spacc_aead_make_ddts() local
338 src_nents = sg_nents_for_len(areq->src, total); in spacc_aead_make_ddts()
339 if (src_nents < 0) { in spacc_aead_make_ddts()
341 return src_nents; in spacc_aead_make_ddts()
343 if (src_nents + 1 > MAX_DDT_LEN) in spacc_aead_make_ddts()
353 if (src_nents + 1 > MAX_DDT_LEN) in spacc_aead_make_ddts()
369 src_ents = dma_map_sg(engine->dev, areq->src, src_nents, in spacc_aead_make_ddts()
378 dma_unmap_sg(engine->dev, areq->src, src_nents, in spacc_aead_make_ddts()
383 src_ents = dma_map_sg(engine->dev, areq->src, src_nents, in spacc_aead_make_ddts()
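spacc_aead_make_ddts() adds a bounds check before building its DMA descriptor table: the entry count plus one reserved slot must fit in the fixed-size DDT, otherwise the request is rejected rather than truncated. A sketch of that guard; MAX_DDT_LEN's value, the reason for the extra slot, and the error code are not visible in the hits, so they are stand-ins here:

#include <linux/errno.h>
#include <linux/scatterlist.h>

#define EXAMPLE_MAX_DDT_LEN 16	/* placeholder for the driver's MAX_DDT_LEN */

/* Sketch: refuse requests whose scatterlist would overflow the fixed DDT. */
static int example_check_ddt_fit(struct scatterlist *src, size_t total)
{
	int src_nents = sg_nents_for_len(src, total);

	if (src_nents < 0)
		return src_nents;
	if (src_nents + 1 > EXAMPLE_MAX_DDT_LEN)	/* +1: reserved slot */
		return -E2BIG;	/* error code chosen for illustration */

	return src_nents;
}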
mxc-scc.c
155 size_t src_nents; member
220 ctx->src_nents = nents; in mxc_scc_ablkcipher_req_init()
274 len = sg_pcopy_to_buffer(req->src, ctx->src_nents, in mxc_scc_put_data()
/Linux-v4.19/drivers/crypto/virtio/
virtio_crypto_algs.c
368 int src_nents, dst_nents; in __virtio_crypto_ablkcipher_do_req() local
378 src_nents = sg_nents_for_len(req->src, req->nbytes); in __virtio_crypto_ablkcipher_do_req()
382 src_nents, dst_nents); in __virtio_crypto_ablkcipher_do_req()
385 sg_total = src_nents + dst_nents + 3; in __virtio_crypto_ablkcipher_do_req()
459 for (i = 0; i < src_nents; i++) in __virtio_crypto_ablkcipher_do_req()
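__virtio_crypto_ablkcipher_do_req() sizes an array of scatterlist pointers as src_nents + dst_nents + 3; the three extra slots hold per-request control buffers alongside the data entries (exactly which three is not visible in the hits). A sketch of that sizing and of the per-entry fill loop at line 459, using kcalloc as the allocator:

#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Sketch: size a scatterlist-pointer array for the data entries plus a fixed
 * number of control entries, as the virtio driver's "+ 3" does.
 */
static struct scatterlist **example_alloc_sg_array(int src_nents,
						   int dst_nents,
						   int extra, gfp_t gfp)
{
	unsigned int sg_total = src_nents + dst_nents + extra;
	struct scatterlist **sgs;

	sgs = kcalloc(sg_total, sizeof(*sgs), gfp);
	if (!sgs)
		return NULL;

	/* Callers then walk req->src and req->dst, storing one pointer per
	 * entry ("for (i = 0; i < src_nents; i++)" in the hit at line 459)
	 * before handing the array to the virtqueue.
	 */
	return sgs;
}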
/Linux-v4.19/drivers/crypto/ccp/
ccp-dmaengine.c
354 unsigned int src_nents, in ccp_create_desc() argument
373 if (!dst_nents || !src_nents) in ccp_create_desc()
390 src_nents--; in ccp_create_desc()
391 if (!src_nents) in ccp_create_desc()
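ccp_create_desc() uses src_nents differently from the drivers above: it is a loop budget rather than a mapping count, decremented as source entries are packed into hardware descriptors and ending the loop when it reaches zero (line 391). A simplified sketch of that consumption loop; the real function also tracks per-entry offsets and descriptor lengths:

#include <linux/scatterlist.h>

/* Sketch: walk the source scatterlist until the entry budget runs out,
 * the way ccp_create_desc() consumes src_nents.
 */
static unsigned int example_consume_src(struct scatterlist *src_sg,
					unsigned int src_nents)
{
	unsigned int descs = 0;

	if (!src_nents)
		return 0;

	while (src_sg) {
		/* ... emit one or more hardware descriptors covering this
		 * entry (length/offset bookkeeping elided) ...
		 */
		descs++;

		src_nents--;
		if (!src_nents)
			break;		/* budget exhausted, as at line 391 */
		src_sg = sg_next(src_sg);
	}

	return descs;
}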
/Linux-v4.19/drivers/crypto/chelsio/
chcr_crypto.h
189 short int src_nents; member
chcr_algo.c
2220 reqctx->src_nents = sg_nents_xlen(req->src, req->cryptlen, in chcr_aead_common_init()
2307 : (sgl_len(reqctx->src_nents + reqctx->aad_nents in create_authenc_wr()
2872 (sgl_len(reqctx->src_nents + reqctx->aad_nents + in create_aead_ccm_wr()
2960 (sgl_len(reqctx->src_nents + in create_gcm_wr()
/Linux-v4.19/drivers/crypto/bcm/
cipher.c
240 rctx->src_nents, chunksize); in spu_ablkcipher_tx_sg_create()
363 rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize); in handle_ablkcipher_req()
488 tx_frag_num += rctx->src_nents; in handle_ablkcipher_req()
654 rctx->src_nents, new_data_len); in spu_ahash_tx_sg_create()
816 rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, in handle_ahash_req()
919 tx_frag_num += rctx->src_nents; in handle_ahash_req()
1266 rctx->src_nents, datalen); in spu_aead_tx_sg_create()
1392 rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize); in handle_aead_req()
1551 tx_frag_num += rctx->src_nents; in handle_aead_req()
1787 rctx->src_nents = 0; in ablkcipher_enqueue()
[all …]
cipher.h
297 int src_nents; /* Number of src entries with data */ member
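The Broadcom driver counts with its own helper, spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize), which, judging from its arguments, counts how many entries are needed to cover chunksize bytes once src_skip bytes have been skipped; the result then feeds the tx_frag_num totals used to size the outgoing message. A hedged reimplementation of such a counting helper, written from the call sites rather than the driver's actual code:

#include <linux/scatterlist.h>

/* Sketch: count scatterlist entries covering 'len' bytes after skipping
 * 'skip' bytes. This is an assumption of what spu_sg_count() does, not its
 * real implementation.
 */
static int example_sg_count(struct scatterlist *sg, unsigned int skip,
			    int len)
{
	int nents = 0;

	/* Walk past entries that are entirely inside the skipped prefix. */
	while (sg && skip >= sg->length) {
		skip -= sg->length;
		sg = sg_next(sg);
	}

	/* Count entries until 'len' bytes have been accounted for. */
	while (sg && len > 0) {
		len -= sg->length - skip;
		skip = 0;
		nents++;
		sg = sg_next(sg);
	}

	return nents;
}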