/Linux-v5.4/drivers/crypto/caam/
caampkc.c
    43  static void rsa_io_unmap(struct device *dev, struct rsa_edesc *edesc,    in rsa_io_unmap() argument
    48  dma_unmap_sg(dev, req->dst, edesc->dst_nents, DMA_FROM_DEVICE);    in rsa_io_unmap()
    49  dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE);    in rsa_io_unmap()
    51  if (edesc->sec4_sg_bytes)    in rsa_io_unmap()
    52  dma_unmap_single(dev, edesc->sec4_sg_dma, edesc->sec4_sg_bytes,    in rsa_io_unmap()
    56  static void rsa_pub_unmap(struct device *dev, struct rsa_edesc *edesc,    in rsa_pub_unmap() argument
    62  struct rsa_pub_pdb *pdb = &edesc->pdb.pub;    in rsa_pub_unmap()
    68  static void rsa_priv_f1_unmap(struct device *dev, struct rsa_edesc *edesc,    in rsa_priv_f1_unmap() argument
    74  struct rsa_priv_f1_pdb *pdb = &edesc->pdb.priv_f1;    in rsa_priv_f1_unmap()
    80  static void rsa_priv_f2_unmap(struct device *dev, struct rsa_edesc *edesc,    in rsa_priv_f2_unmap() argument
    [all …]
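
The caampkc.c hits above are the RSA cleanup helpers: each one releases only the DMA resources that were actually mapped for the request (the S/G link table is unmapped only when sec4_sg_bytes is non-zero), and the same shape recurs in the caamalg.c, caamhash.c and caamalg_qi.c matches below. A minimal user-space sketch of that pattern follows; the struct layout and the unmap stubs are invented for illustration and only stand in for the kernel's dma_unmap_sg()/dma_unmap_single():

```c
#include <stdio.h>

/* Invented stand-in for the driver's extended-descriptor bookkeeping. */
struct rsa_edesc_model {
	int src_nents;		/* scatterlist entries mapped for the source */
	int dst_nents;		/* scatterlist entries mapped for the destination */
	int sec4_sg_bytes;	/* size of the mapped S/G link table, 0 if none */
};

/* Stubs that only report what a real dma_unmap_*() call would release. */
static void unmap_sg_stub(const char *what, int nents)
{
	printf("unmap %d scatterlist entries (%s)\n", nents, what);
}

static void unmap_single_stub(const char *what, int bytes)
{
	printf("unmap %d bytes (%s)\n", bytes, what);
}

/* Mirrors the shape of rsa_io_unmap(): always release the src/dst mappings,
 * release the S/G link table only if one was actually mapped. */
static void rsa_io_unmap_model(struct rsa_edesc_model *edesc)
{
	unmap_sg_stub("dst", edesc->dst_nents);
	unmap_sg_stub("src", edesc->src_nents);

	if (edesc->sec4_sg_bytes)
		unmap_single_stub("sec4 S/G table", edesc->sec4_sg_bytes);
}

int main(void)
{
	struct rsa_edesc_model edesc = {
		.src_nents = 2, .dst_nents = 1, .sec4_sg_bytes = 0,
	};

	rsa_io_unmap_model(&edesc);	/* no link table was mapped, so none is released */
	return 0;
}
```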
|
caamalg.c
    943  struct aead_edesc *edesc,    in aead_unmap() argument
    947  edesc->src_nents, edesc->dst_nents, 0, 0,    in aead_unmap()
    948  edesc->sec4_sg_dma, edesc->sec4_sg_bytes);    in aead_unmap()
    951  static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,    in skcipher_unmap() argument
    958  edesc->src_nents, edesc->dst_nents,    in skcipher_unmap()
    959  edesc->iv_dma, ivsize,    in skcipher_unmap()
    960  edesc->sec4_sg_dma, edesc->sec4_sg_bytes);    in skcipher_unmap()
    967  struct aead_edesc *edesc;    in aead_encrypt_done() local
    972  edesc = container_of(desc, struct aead_edesc, hw_desc[0]);    in aead_encrypt_done()
    977  aead_unmap(jrdev, edesc, req);    in aead_encrypt_done()
    [all …]
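
In the aead_encrypt_done() hits, the job-ring callback receives only a pointer to the first hardware descriptor word and recovers the enclosing aead_edesc with container_of() before unmapping and completing the request. A self-contained sketch of that recovery, using a local container_of definition and an invented descriptor layout (not the real struct aead_edesc):

```c
#include <stdio.h>
#include <stddef.h>

/* Local container_of: recover the enclosing struct from a member pointer. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

/* Invented stand-in for struct aead_edesc: bookkeeping fields followed by
 * the hardware descriptor words whose address comes back in the callback. */
struct edesc_model {
	int src_nents;
	int dst_nents;
	unsigned int hw_desc[4];
};

/* Completion callback: only &hw_desc[0] is handed back by the "hardware". */
static void encrypt_done_model(unsigned int *desc)
{
	struct edesc_model *edesc =
		container_of(desc, struct edesc_model, hw_desc);

	printf("recovered edesc: src_nents=%d dst_nents=%d\n",
	       edesc->src_nents, edesc->dst_nents);
	/* the real callback unmaps DMA resources and completes the request here */
}

int main(void)
{
	struct edesc_model edesc = { .src_nents = 3, .dst_nents = 2 };

	encrypt_done_model(&edesc.hw_desc[0]);
	return 0;
}
```

Embedding the hardware descriptor inside the driver's own bookkeeping struct is what makes this cheap: the callback needs no extra lookup table to get from the pointer the hardware returns to the rest of the request state.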
|
caamhash.c
    568  struct ahash_edesc *edesc,    in ahash_unmap() argument
    573  if (edesc->src_nents)    in ahash_unmap()
    574  dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE);    in ahash_unmap()
    576  if (edesc->sec4_sg_bytes)    in ahash_unmap()
    577  dma_unmap_single(dev, edesc->sec4_sg_dma,    in ahash_unmap()
    578  edesc->sec4_sg_bytes, DMA_TO_DEVICE);    in ahash_unmap()
    588  struct ahash_edesc *edesc,    in ahash_unmap_ctx() argument
    597  ahash_unmap(dev, edesc, req, dst_len);    in ahash_unmap_ctx()
    604  struct ahash_edesc *edesc;    in ahash_done() local
    613  edesc = container_of(desc, struct ahash_edesc, hw_desc[0]);    in ahash_done()
    [all …]
|
caamalg_qi2.c
    362  struct aead_edesc *edesc;    in aead_edesc_alloc() local
    371  edesc = qi_cache_zalloc(GFP_DMA | flags);    in aead_edesc_alloc()
    372  if (unlikely(!edesc)) {    in aead_edesc_alloc()
    385  qi_cache_free(edesc);    in aead_edesc_alloc()
    393  qi_cache_free(edesc);    in aead_edesc_alloc()
    402  qi_cache_free(edesc);    in aead_edesc_alloc()
    416  qi_cache_free(edesc);    in aead_edesc_alloc()
    430  qi_cache_free(edesc);    in aead_edesc_alloc()
    438  qi_cache_free(edesc);    in aead_edesc_alloc()
    468  sg_table = &edesc->sgt[0];    in aead_edesc_alloc()
    [all …]
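
The aead_edesc_alloc() hits show the allocator side: the descriptor comes out of a DMA-safe cache up front (qi_cache_zalloc()), and every failure exit frees it again with qi_cache_free() so nothing leaks. A rough sketch of that discipline with invented helpers; plain calloc()/free() stand in for the cache:

```c
#include <stdio.h>
#include <stdlib.h>

/* Invented descriptor; the real struct carries S/G tables and DMA state. */
struct edesc_model {
	int src_nents;
	int dst_nents;
};

/* Models the allocator: the descriptor must not leak on any error path. */
static struct edesc_model *edesc_alloc_model(int src_nents, int dst_nents)
{
	struct edesc_model *edesc = calloc(1, sizeof(*edesc));	/* qi_cache_zalloc() */

	if (!edesc)
		return NULL;

	if (src_nents <= 0) {		/* e.g. mapping the source scatterlist failed */
		fprintf(stderr, "unable to map source\n");
		free(edesc);		/* qi_cache_free() in the driver */
		return NULL;
	}

	if (dst_nents < 0) {		/* e.g. mapping the destination failed */
		fprintf(stderr, "unable to map destination\n");
		free(edesc);
		return NULL;
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	return edesc;
}

int main(void)
{
	struct edesc_model *edesc = edesc_alloc_model(2, 1);

	if (edesc) {
		printf("allocated edesc (src=%d dst=%d)\n",
		       edesc->src_nents, edesc->dst_nents);
		free(edesc);
	}
	return 0;
}
```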
|
caamalg_qi.c
    891  struct aead_edesc *edesc,    in aead_unmap() argument
    897  caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,    in aead_unmap()
    898  edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma,    in aead_unmap()
    899  edesc->qm_sg_bytes);    in aead_unmap()
    900  dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE);    in aead_unmap()
    903  static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,    in skcipher_unmap() argument
    909  caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents,    in skcipher_unmap()
    910  edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma,    in skcipher_unmap()
    911  edesc->qm_sg_bytes);    in skcipher_unmap()
    917  struct aead_edesc *edesc;    in aead_done() local
    [all …]
|
caamalg_qi2.h
    188  void *edesc;    member
|
/Linux-v5.4/drivers/dma/ti/
edma.c
    227  struct edma_desc *edesc;    member
    766  struct edma_desc *edesc;    in edma_execute() local
    770  if (!echan->edesc) {    in edma_execute()
    776  echan->edesc = to_edma_desc(&vdesc->tx);    in edma_execute()
    779  edesc = echan->edesc;    in edma_execute()
    782  left = edesc->pset_nr - edesc->processed;    in edma_execute()
    784  edesc->sg_len = 0;    in edma_execute()
    788  j = i + edesc->processed;    in edma_execute()
    789  edma_write_slot(ecc, echan->slot[i], &edesc->pset[j].param);    in edma_execute()
    790  edesc->sg_len += edesc->pset[j].len;    in edma_execute()
    [all …]
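
edma_execute() programs PaRAM slots in batches: if no descriptor is active it takes the next one off the virtual channel, then submits as many of the remaining psets as there are slots, accumulating the byte count it just queued. A stand-alone sketch of that bookkeeping; the batch limit, pset sizes and field names are made up for illustration:

```c
#include <stdio.h>

#define MAX_SLOTS 4	/* invented limit on slots programmed per pass */

struct edesc_model {
	int pset_nr;		/* total parameter sets in this transfer */
	int processed;		/* how many have been submitted so far */
	unsigned int len[8];	/* bytes covered by each pset */
};

/* Submit the next batch of psets; returns the bytes queued in this pass. */
static unsigned int execute_model(struct edesc_model *edesc)
{
	int left = edesc->pset_nr - edesc->processed;
	int nslots = left < MAX_SLOTS ? left : MAX_SLOTS;
	unsigned int bytes = 0;
	int i;

	for (i = 0; i < nslots; i++) {
		int j = i + edesc->processed;	/* absolute pset index */

		/* the driver writes the pset into a PaRAM slot here:
		 * edma_write_slot(ecc, echan->slot[i], &edesc->pset[j].param) */
		bytes += edesc->len[j];
	}
	edesc->processed += nslots;
	return bytes;
}

int main(void)
{
	struct edesc_model edesc = {
		.pset_nr = 6,
		.len = { 512, 512, 256, 1024, 128, 64 },
	};

	while (edesc.processed < edesc.pset_nr) {
		unsigned int bytes = execute_model(&edesc);

		printf("queued %u bytes, %d/%d psets done\n",
		       bytes, edesc.processed, edesc.pset_nr);
	}
	return 0;
}
```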
|
/Linux-v5.4/drivers/crypto/
talitos.c
    325  struct talitos_edesc *edesc;    in get_request_hdr() local
    333  edesc = container_of(request->desc, struct talitos_edesc, desc);    in get_request_hdr()
    335  return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;    in get_request_hdr()
    490  struct talitos_edesc *edesc;    local
    492  edesc = container_of(priv->chan[ch].fifo[iter].desc,
    495  (edesc->buf + edesc->dma_len))->hdr;
    964  struct talitos_edesc *edesc,    in talitos_sg_unmap() argument
    971  unsigned int src_nents = edesc->src_nents ? : 1;    in talitos_sg_unmap()
    972  unsigned int dst_nents = edesc->dst_nents ? : 1;    in talitos_sg_unmap()
    975  dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,    in talitos_sg_unmap()
    [all …]
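
The talitos matches use container_of() to get from a submitted descriptor back to its talitos_edesc, and then read the header of a descriptor copy that the driver keeps in edesc->buf just past dma_len bytes of link tables. A sketch of that buffer layout with invented structures (nothing here matches the real talitos_desc/talitos_edesc definitions):

```c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Invented stand-in for the hardware descriptor. */
struct desc_model {
	unsigned int hdr;
};

/* Invented edesc: dma_len bytes of link tables in buf[], followed by a
 * software copy of the descriptor. */
struct edesc_model {
	unsigned int dma_len;
	char buf[];
};

/* Mirrors get_request_hdr(): the descriptor copy lives just past the tables. */
static unsigned int get_hdr_model(const struct edesc_model *edesc)
{
	return ((const struct desc_model *)(edesc->buf + edesc->dma_len))->hdr;
}

int main(void)
{
	unsigned int dma_len = 64;	/* pretend the link tables take 64 bytes */
	struct edesc_model *edesc =
		malloc(sizeof(*edesc) + dma_len + sizeof(struct desc_model));
	struct desc_model desc = { .hdr = 0x12345678u };

	if (!edesc)
		return 1;
	edesc->dma_len = dma_len;
	memcpy(edesc->buf + dma_len, &desc, sizeof(desc));	/* keep the copy */

	printf("recovered hdr = 0x%x\n", get_hdr_model(edesc));
	free(edesc);
	return 0;
}
```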
|
/Linux-v5.4/drivers/dma/
fsl-edma-common.c
    163  fsl_chan->edesc = NULL;    in fsl_edma_terminate_all()
    178  if (fsl_chan->edesc) {    in fsl_edma_pause()
    194  if (fsl_chan->edesc) {    in fsl_edma_resume()
    268  struct fsl_edma_desc *edesc = fsl_chan->edesc;    in fsl_edma_desc_residue() local
    271  enum dma_transfer_direction dir = edesc->dirn;    in fsl_edma_desc_residue()
    277  for (len = i = 0; i < fsl_chan->edesc->n_tcds; i++)    in fsl_edma_desc_residue()
    278  len += le32_to_cpu(edesc->tcd[i].vtcd->nbytes)    in fsl_edma_desc_residue()
    279  * le16_to_cpu(edesc->tcd[i].vtcd->biter);    in fsl_edma_desc_residue()
    290  for (i = 0; i < fsl_chan->edesc->n_tcds; i++) {    in fsl_edma_desc_residue()
    291  size = le32_to_cpu(edesc->tcd[i].vtcd->nbytes)    in fsl_edma_desc_residue()
    [all …]
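
fsl_edma_desc_residue() first totals nbytes * biter over all TCDs of the active edesc, then, if the transfer is in flight, walks the TCDs subtracting what has already been moved based on the engine's current address. A stand-alone model of that arithmetic (TCD fields and addresses are invented):

```c
#include <stdio.h>

/* Invented TCD model: each TCD moves nbytes per minor loop, biter minor
 * loops in total, starting at dma_addr. */
struct tcd_model {
	unsigned long dma_addr;
	unsigned int nbytes;
	unsigned int biter;
};

/* Remaining bytes given the engine's current address, mirroring the shape
 * of fsl_edma_desc_residue(). */
static unsigned long residue_model(const struct tcd_model *tcd, int n_tcds,
				   unsigned long cur_addr, int in_progress)
{
	unsigned long len = 0;
	int i;

	for (i = 0; i < n_tcds; i++)
		len += (unsigned long)tcd[i].nbytes * tcd[i].biter;

	if (!in_progress)
		return len;	/* nothing of this descriptor has run yet */

	for (i = 0; i < n_tcds; i++) {
		unsigned long size = (unsigned long)tcd[i].nbytes * tcd[i].biter;

		if (cur_addr >= tcd[i].dma_addr &&
		    cur_addr < tcd[i].dma_addr + size)
			/* inside this TCD: drop what it has already moved */
			return len - (cur_addr - tcd[i].dma_addr);

		len -= size;	/* this TCD has fully completed */
	}
	return 0;
}

int main(void)
{
	struct tcd_model tcd[] = {
		{ .dma_addr = 0x1000, .nbytes = 32, .biter = 8 },	/* 256 bytes */
		{ .dma_addr = 0x2000, .nbytes = 32, .biter = 8 },	/* 256 bytes */
	};

	/* engine is 64 bytes into the second TCD: 256 - 64 = 192 bytes left */
	printf("residue = %lu bytes\n", residue_model(tcd, 2, 0x2040, 1));
	return 0;
}
```

With the values in main(), the first TCD has completed and the engine is 64 bytes into the second, so the model reports 192 bytes outstanding.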
|
mcf-edma.c
    38  if (!mcf_chan->edesc->iscyclic) {    in mcf_edma_tx_handler()
    39  list_del(&mcf_chan->edesc->vdesc.node);    in mcf_edma_tx_handler()
    40  vchan_cookie_complete(&mcf_chan->edesc->vdesc);    in mcf_edma_tx_handler()
    41  mcf_chan->edesc = NULL;    in mcf_edma_tx_handler()
    45  vchan_cyclic_callback(&mcf_chan->edesc->vdesc);    in mcf_edma_tx_handler()
    48  if (!mcf_chan->edesc)    in mcf_edma_tx_handler()
|
fsl-edma.c
    48  if (!fsl_chan->edesc->iscyclic) {    in fsl_edma_tx_handler()
    49  list_del(&fsl_chan->edesc->vdesc.node);    in fsl_edma_tx_handler()
    50  vchan_cookie_complete(&fsl_chan->edesc->vdesc);    in fsl_edma_tx_handler()
    51  fsl_chan->edesc = NULL;    in fsl_edma_tx_handler()
    55  vchan_cyclic_callback(&fsl_chan->edesc->vdesc);    in fsl_edma_tx_handler()
    58  if (!fsl_chan->edesc)    in fsl_edma_tx_handler()
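
fsl_edma_tx_handler() above, like the mcf-edma.c handler before it, branches on whether the active edesc is cyclic: a one-shot descriptor is taken off the list and completed, clearing the channel's edesc so the next descriptor can start, while a cyclic descriptor stays installed and only fires its period callback. A sketch of that branch with invented types; plain functions stand in for vchan_cookie_complete()/vchan_cyclic_callback():

```c
#include <stdio.h>
#include <stdbool.h>

struct edesc_model {
	bool iscyclic;
};

struct chan_model {
	struct edesc_model *edesc;	/* currently active descriptor, if any */
};

static void cookie_complete_model(struct edesc_model *d)
{
	(void)d;
	printf("one-shot descriptor completed\n");
}

static void cyclic_callback_model(struct edesc_model *d)
{
	(void)d;
	printf("cyclic descriptor: period callback\n");
}

/* Mirrors the shape of the tx interrupt handler. */
static void tx_handler_model(struct chan_model *chan)
{
	if (!chan->edesc)
		return;				/* nothing active on this channel */

	if (!chan->edesc->iscyclic) {
		cookie_complete_model(chan->edesc);
		chan->edesc = NULL;		/* the real handler then starts the
						 * next queued descriptor */
	} else {
		cyclic_callback_model(chan->edesc);
	}
}

int main(void)
{
	struct edesc_model oneshot = { .iscyclic = false };
	struct edesc_model cyclic = { .iscyclic = true };
	struct chan_model chan = { .edesc = &oneshot };

	tx_handler_model(&chan);	/* completes and clears the descriptor */

	chan.edesc = &cyclic;
	tx_handler_model(&chan);	/* cyclic: the descriptor stays active */
	tx_handler_model(&chan);
	return 0;
}
```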
|
fsl-edma-common.h
    121  struct fsl_edma_desc *edesc;    member
|