Lines Matching refs:edesc

891 struct aead_edesc *edesc, in aead_unmap() argument
897 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
898 edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma, in aead_unmap()
899 edesc->qm_sg_bytes); in aead_unmap()
900 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_unmap()
903 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, in skcipher_unmap() argument
909 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
910 edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma, in skcipher_unmap()
911 edesc->qm_sg_bytes); in skcipher_unmap()
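
The two unmap helpers above differ only in the DMA direction used for the IV (DMA_TO_DEVICE for AEAD, DMA_BIDIRECTIONAL for skcipher); both delegate to caam_unmap(). Judging by the caam_qi_enqueue()/qi_cache_alloc() references further down, these lines come from drivers/crypto/caam/caamalg_qi.c. The following is a hedged sketch of what such a helper has to undo, reconstructed from the call sites and the standard DMA API; it assumes the usual caamalg_qi.c includes and is not a verbatim copy of the driver's caam_unmap().

/*
 * Sketch of the cleanup implied by the caam_unmap() call sites above.
 * It reverses the three mappings an edesc can hold: the source/destination
 * scatterlists, the single-buffer IV mapping, and the hardware S/G table.
 */
static void caam_unmap_sketch(struct device *dev, struct scatterlist *src,
			      struct scatterlist *dst, int src_nents,
			      int dst_nents, dma_addr_t iv_dma, int ivsize,
			      enum dma_data_direction iv_dir,
			      dma_addr_t qm_sg_dma, int qm_sg_bytes)
{
	if (dst != src) {
		/* separate source and destination scatterlists */
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else if (src_nents) {
		/* in-place operation: one bidirectional mapping */
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, iv_dir);
	if (qm_sg_bytes)
		dma_unmap_single(dev, qm_sg_dma, qm_sg_bytes, DMA_TO_DEVICE);
}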
917 struct aead_edesc *edesc; in aead_done() local
928 edesc = container_of(drv_req, typeof(*edesc), drv_req); in aead_done()
929 aead_unmap(qidev, edesc, aead_req); in aead_done()
932 qi_cache_free(edesc); in aead_done()
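
The aead_done() references show the QI completion-callback pattern: the caam_drv_req embedded in the edesc is handed back by the QI layer, and container_of() recovers the enclosing aead_edesc so it can be unmapped and returned to the qi_cache. Below is a simplified sketch of that pattern; aead_done_sketch is an illustrative name, status-word decoding is reduced to a generic error, and the surrounding caamalg_qi.c context (struct caam_ctx, aead_unmap, qi_cache_free) is assumed.

/* Simplified sketch of the completion callback traced above. */
static void aead_done_sketch(struct caam_drv_req *drv_req, u32 status)
{
	struct aead_request *aead_req = drv_req->app_ctx;
	struct crypto_aead *aead = crypto_aead_reqtfm(aead_req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *qidev = ctx->qidev;
	struct aead_edesc *edesc;
	int ecode = 0;

	if (unlikely(status))
		ecode = -EIO;	/* the driver decodes the CAAM status word */

	/* drv_req is embedded in the edesc, so recover the container */
	edesc = container_of(drv_req, typeof(*edesc), drv_req);
	aead_unmap(qidev, edesc, aead_req);

	aead_request_complete(aead_req, ecode);
	qi_cache_free(edesc);
}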
950 struct aead_edesc *edesc; in aead_edesc_alloc() local
964 edesc = qi_cache_alloc(GFP_DMA | flags); in aead_edesc_alloc()
965 if (unlikely(!edesc)) { in aead_edesc_alloc()
978 qi_cache_free(edesc); in aead_edesc_alloc()
986 qi_cache_free(edesc); in aead_edesc_alloc()
997 qi_cache_free(edesc); in aead_edesc_alloc()
1005 qi_cache_free(edesc); in aead_edesc_alloc()
1014 qi_cache_free(edesc); in aead_edesc_alloc()
1029 qi_cache_free(edesc); in aead_edesc_alloc()
1061 sg_table = &edesc->sgt[0]; in aead_edesc_alloc()
1069 qi_cache_free(edesc); in aead_edesc_alloc()
1084 qi_cache_free(edesc); in aead_edesc_alloc()
1089 edesc->src_nents = src_nents; in aead_edesc_alloc()
1090 edesc->dst_nents = dst_nents; in aead_edesc_alloc()
1091 edesc->iv_dma = iv_dma; in aead_edesc_alloc()
1092 edesc->drv_req.app_ctx = req; in aead_edesc_alloc()
1093 edesc->drv_req.cbk = aead_done; in aead_edesc_alloc()
1094 edesc->drv_req.drv_ctx = drv_ctx; in aead_edesc_alloc()
1096 edesc->assoclen = cpu_to_caam32(req->assoclen); in aead_edesc_alloc()
1097 edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4, in aead_edesc_alloc()
1099 if (dma_mapping_error(qidev, edesc->assoclen_dma)) { in aead_edesc_alloc()
1103 qi_cache_free(edesc); in aead_edesc_alloc()
1107 dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0); in aead_edesc_alloc()
1122 dma_unmap_single(qidev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_edesc_alloc()
1125 qi_cache_free(edesc); in aead_edesc_alloc()
1129 edesc->qm_sg_dma = qm_sg_dma; in aead_edesc_alloc()
1130 edesc->qm_sg_bytes = qm_sg_bytes; in aead_edesc_alloc()
1136 fd_sgt = &edesc->drv_req.fd_sgt[0]; in aead_edesc_alloc()
1155 return edesc; in aead_edesc_alloc()
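
The aead_edesc_alloc() references trace the descriptor build: the edesc and its trailing S/G table come from the per-CPU QI cache, the 32-bit assoclen gets its own DMA mapping and becomes the first S/G entry, and every failure path frees the edesc back to the cache. The outline below is a condensed, hedged sketch of that flow, not the driver function: the signature taking qidev/flags directly is a simplification, and source/destination/IV handling plus the qm_sg_dma mapping are elided.

/*
 * Condensed sketch of the allocation flow; error unwinding, src/dst
 * mapping and S/G table sizing are omitted.
 */
static struct aead_edesc *aead_edesc_alloc_sketch(struct aead_request *req,
						  struct device *qidev,
						  gfp_t flags)
{
	struct aead_edesc *edesc;
	struct qm_sg_entry *sg_table;

	/* edesc plus its trailing S/G table come from the per-CPU QI cache */
	edesc = qi_cache_alloc(GFP_DMA | flags);
	if (unlikely(!edesc))
		return ERR_PTR(-ENOMEM);

	sg_table = &edesc->sgt[0];

	/* assoclen is passed to CAAM through its own small DMA buffer */
	edesc->assoclen = cpu_to_caam32(req->assoclen);
	edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4,
					     DMA_TO_DEVICE);
	if (dma_mapping_error(qidev, edesc->assoclen_dma)) {
		qi_cache_free(edesc);
		return ERR_PTR(-ENOMEM);
	}

	/* first hardware S/G entry points at the mapped assoclen word */
	dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0);

	/*
	 * ... source, destination and IV entries follow, then the whole
	 * table is mapped and recorded in edesc->qm_sg_dma/qm_sg_bytes ...
	 */
	return edesc;
}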
1160 struct aead_edesc *edesc; in aead_crypt() local
1169 edesc = aead_edesc_alloc(req, encrypt); in aead_crypt()
1170 if (IS_ERR_OR_NULL(edesc)) in aead_crypt()
1171 return PTR_ERR(edesc); in aead_crypt()
1174 ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req); in aead_crypt()
1178 aead_unmap(ctx->qidev, edesc, req); in aead_crypt()
1179 qi_cache_free(edesc); in aead_crypt()
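
The aead_crypt() references show the submit path: build the edesc, hand its embedded drv_req to caam_qi_enqueue(), and unwind the mappings and the cache allocation if the enqueue is refused. A sketch of that pattern, keeping the checks as they appear in the listing (aead_crypt_sketch is an illustrative name; the surrounding caamalg_qi.c context is assumed):

/* Sketch of the submit/unwind pattern traced by the aead_crypt() lines. */
static int aead_crypt_sketch(struct aead_request *req, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct aead_edesc *edesc;
	int ret;

	/* allocate the extended descriptor and DMA-map the request */
	edesc = aead_edesc_alloc(req, encrypt);
	if (IS_ERR_OR_NULL(edesc))
		return PTR_ERR(edesc);

	/* hand the embedded drv_req to the QI backend */
	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* enqueue refused: undo the mappings and free the edesc */
		aead_unmap(ctx->qidev, edesc, req);
		qi_cache_free(edesc);
	}

	return ret;
}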
1209 struct skcipher_edesc *edesc; in skcipher_done() local
1219 edesc = container_of(drv_req, typeof(*edesc), drv_req); in skcipher_done()
1226 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_done()
1229 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_done()
1231 skcipher_unmap(qidev, edesc, req); in skcipher_done()
1239 memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, in skcipher_done()
1242 qi_cache_free(edesc); in skcipher_done()
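
The skcipher_done() references follow the same callback pattern as aead_done(), with one extra step visible at line 1239: the IV for the next chained operation is written by the descriptor just past the hardware S/G table, at qm_sg_bytes into edesc->sgt[], and is copied back into req->iv before the edesc is freed. A simplified sketch follows (the copy is done unconditionally here, and the status decoding is reduced to a generic error):

/* Simplified sketch of the skcipher completion callback traced above. */
static void skcipher_done_sketch(struct caam_drv_req *drv_req, u32 status)
{
	struct skcipher_request *req = drv_req->app_ctx;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *qidev = ctx->qidev;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	struct skcipher_edesc *edesc;
	int ecode = 0;

	if (unlikely(status))
		ecode = -EIO;	/* the driver decodes the CAAM status word */

	edesc = container_of(drv_req, typeof(*edesc), drv_req);
	skcipher_unmap(qidev, edesc, req);

	/* the next IV sits right after the S/G table inside the edesc */
	memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, ivsize);

	qi_cache_free(edesc);
	skcipher_request_complete(req, ecode);
}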
1255 struct skcipher_edesc *edesc; in skcipher_edesc_alloc() local
1332 edesc = qi_cache_alloc(GFP_DMA | flags); in skcipher_edesc_alloc()
1333 if (unlikely(!edesc)) { in skcipher_edesc_alloc()
1341 sg_table = &edesc->sgt[0]; in skcipher_edesc_alloc()
1350 qi_cache_free(edesc); in skcipher_edesc_alloc()
1354 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1355 edesc->dst_nents = dst_nents; in skcipher_edesc_alloc()
1356 edesc->iv_dma = iv_dma; in skcipher_edesc_alloc()
1357 edesc->qm_sg_bytes = qm_sg_bytes; in skcipher_edesc_alloc()
1358 edesc->drv_req.app_ctx = req; in skcipher_edesc_alloc()
1359 edesc->drv_req.cbk = skcipher_done; in skcipher_edesc_alloc()
1360 edesc->drv_req.drv_ctx = drv_ctx; in skcipher_edesc_alloc()
1371 edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes, in skcipher_edesc_alloc()
1373 if (dma_mapping_error(qidev, edesc->qm_sg_dma)) { in skcipher_edesc_alloc()
1377 qi_cache_free(edesc); in skcipher_edesc_alloc()
1381 fd_sgt = &edesc->drv_req.fd_sgt[0]; in skcipher_edesc_alloc()
1383 dma_to_qm_sg_one_last_ext(&fd_sgt[1], edesc->qm_sg_dma, in skcipher_edesc_alloc()
1387 dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + in skcipher_edesc_alloc()
1391 dma_to_qm_sg_one_ext(&fd_sgt[0], edesc->qm_sg_dma + dst_sg_idx * in skcipher_edesc_alloc()
1395 return edesc; in skcipher_edesc_alloc()
1400 struct skcipher_edesc *edesc; in skcipher_crypt() local
1412 edesc = skcipher_edesc_alloc(req, encrypt); in skcipher_crypt()
1413 if (IS_ERR(edesc)) in skcipher_crypt()
1414 return PTR_ERR(edesc); in skcipher_crypt()
1416 ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req); in skcipher_crypt()
1420 skcipher_unmap(ctx->qidev, edesc, req); in skcipher_crypt()
1421 qi_cache_free(edesc); in skcipher_crypt()