Lines Matching refs:edesc
364 struct aead_edesc *edesc; in aead_edesc_alloc() local
373 edesc = qi_cache_zalloc(GFP_DMA | flags); in aead_edesc_alloc()
374 if (unlikely(!edesc)) { in aead_edesc_alloc()
387 qi_cache_free(edesc); in aead_edesc_alloc()
395 qi_cache_free(edesc); in aead_edesc_alloc()
404 qi_cache_free(edesc); in aead_edesc_alloc()
418 qi_cache_free(edesc); in aead_edesc_alloc()
432 qi_cache_free(edesc); in aead_edesc_alloc()
440 qi_cache_free(edesc); in aead_edesc_alloc()
470 sg_table = &edesc->sgt[0]; in aead_edesc_alloc()
478 qi_cache_free(edesc); in aead_edesc_alloc()
493 qi_cache_free(edesc); in aead_edesc_alloc()
498 edesc->src_nents = src_nents; in aead_edesc_alloc()
499 edesc->dst_nents = dst_nents; in aead_edesc_alloc()
500 edesc->iv_dma = iv_dma; in aead_edesc_alloc()
508 edesc->assoclen = cpu_to_caam32(req->assoclen - ivsize); in aead_edesc_alloc()
510 edesc->assoclen = cpu_to_caam32(req->assoclen); in aead_edesc_alloc()
511 edesc->assoclen_dma = dma_map_single(dev, &edesc->assoclen, 4, in aead_edesc_alloc()
513 if (dma_mapping_error(dev, edesc->assoclen_dma)) { in aead_edesc_alloc()
517 qi_cache_free(edesc); in aead_edesc_alloc()
521 dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0); in aead_edesc_alloc()
536 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_edesc_alloc()
539 qi_cache_free(edesc); in aead_edesc_alloc()
543 edesc->qm_sg_dma = qm_sg_dma; in aead_edesc_alloc()
544 edesc->qm_sg_bytes = qm_sg_bytes; in aead_edesc_alloc()
585 return edesc; in aead_edesc_alloc()
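
The block above is the allocation path of aead_edesc_alloc() (apparently the DPAA2 CAAM driver, given the qi_cache_* and dpaa2_fl_* calls in this listing). The repeated qi_cache_free() lines (387 through 539) are its error unwinding: the extended descriptor comes out of the QI cache as one zeroed allocation (line 373), and every failure after that point must return it to the cache before propagating an error pointer. A minimal user-space sketch of the same shape, with stand-in stubs in place of qi_cache_zalloc()/qi_cache_free() and the DMA mapping steps (all names below are stand-ins, not the driver's API; the driver frees inline at each failure site, collapsed here into one label):

#include <stdlib.h>
#include <errno.h>

struct edesc { int src_nents, dst_nents; };

/* stand-ins for the driver's per-CPU descriptor cache */
static void *qi_cache_zalloc_stub(void) { return calloc(1, sizeof(struct edesc)); }
static void qi_cache_free_stub(void *p) { free(p); }

/* stand-in for any of the mapping steps that can fail */
static int map_step_stub(int fail) { return fail ? -ENOMEM : 0; }

static struct edesc *edesc_alloc_sketch(int fail_at)
{
    struct edesc *edesc = qi_cache_zalloc_stub();   /* line 373 */
    if (!edesc)
        return NULL;    /* the driver returns ERR_PTR(-ENOMEM) here */

    if (map_step_stub(fail_at == 1))
        goto err_free;  /* lines 387..440: free on each map failure */
    if (map_step_stub(fail_at == 2))
        goto err_free;

    edesc->src_nents = 1;   /* lines 498..500: record state for later unmap */
    edesc->dst_nents = 1;
    return edesc;           /* line 585 */

err_free:
    qi_cache_free_stub(edesc);
    return NULL;
}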
1120 struct skcipher_edesc *edesc; in skcipher_edesc_alloc() local
1192 edesc = qi_cache_zalloc(GFP_DMA | flags); in skcipher_edesc_alloc()
1193 if (unlikely(!edesc)) { in skcipher_edesc_alloc()
1201 sg_table = &edesc->sgt[0]; in skcipher_edesc_alloc()
1210 qi_cache_free(edesc); in skcipher_edesc_alloc()
1214 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1215 edesc->dst_nents = dst_nents; in skcipher_edesc_alloc()
1216 edesc->iv_dma = iv_dma; in skcipher_edesc_alloc()
1217 edesc->qm_sg_bytes = qm_sg_bytes; in skcipher_edesc_alloc()
1228 edesc->qm_sg_dma = dma_map_single(dev, sg_table, edesc->qm_sg_bytes, in skcipher_edesc_alloc()
1230 if (dma_mapping_error(dev, edesc->qm_sg_dma)) { in skcipher_edesc_alloc()
1234 qi_cache_free(edesc); in skcipher_edesc_alloc()
1244 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in skcipher_edesc_alloc()
1249 dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma + in skcipher_edesc_alloc()
1252 dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma + dst_sg_idx * in skcipher_edesc_alloc()
1255 return edesc; in skcipher_edesc_alloc()
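
In skcipher_edesc_alloc() the descriptor, its scatter-gather table (edesc->sgt[0], line 1201), and the IV share one qi_cache allocation; the table is DMA-mapped once (line 1228) and the input and output frame-list entries then address slices of that single mapping by byte offset (lines 1244..1252). A sketch of the offset arithmetic, with a hypothetical entry size and index:

#include <stdint.h>
#include <stdio.h>

#define SG_ENTRY_SIZE 16u   /* stand-in for sizeof(struct dpaa2_sg_entry) */

int main(void)
{
    uint64_t qm_sg_dma = 0x80000000u;   /* bus address of the mapped table */
    unsigned int dst_sg_idx = 3;        /* index of first dst entry, hypothetical */

    /* line 1244: the input frame-list entry points at the head of the
     * table (IV entry followed by the source entries) */
    uint64_t in_addr = qm_sg_dma;

    /* line 1252: with a distinct destination, the output entry starts
     * dst_sg_idx entries into the same mapping */
    uint64_t out_addr = qm_sg_dma + dst_sg_idx * SG_ENTRY_SIZE;

    printf("in=0x%llx out=0x%llx\n",
           (unsigned long long)in_addr, (unsigned long long)out_addr);
    return 0;
}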
1258 static void aead_unmap(struct device *dev, struct aead_edesc *edesc, in aead_unmap() argument
1264 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
1265 edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma, in aead_unmap()
1266 edesc->qm_sg_bytes); in aead_unmap()
1267 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_unmap()
1270 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, in skcipher_unmap() argument
1276 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
1277 edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma, in skcipher_unmap()
1278 edesc->qm_sg_bytes); in skcipher_unmap()
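
aead_unmap() and skcipher_unmap() replay the mapping steps in reverse using only state recorded in the edesc (src_nents, dst_nents, iv_dma, qm_sg_dma, qm_sg_bytes); the AEAD variant additionally drops the 4-byte assoclen mapping created at line 511, and the two differ in the IV's DMA direction (DMA_TO_DEVICE vs DMA_BIDIRECTIONAL, lines 1265/1277) because the skcipher path reads the chained IV back. A stand-in sketch of the mirrored teardown (stub names, not the kernel DMA API):

#include <stddef.h>

struct edesc_state {
    int src_nents, dst_nents;
    unsigned long iv_dma, qm_sg_dma, assoclen_dma;
    size_t qm_sg_bytes;
    int has_assoclen;   /* set only on the AEAD path */
};

/* stand-ins for dma_unmap_sg()/dma_unmap_single() */
static void unmap_sg_stub(int nents) { (void)nents; }
static void unmap_single_stub(unsigned long dma, size_t len) { (void)dma; (void)len; }

static void unmap_sketch(const struct edesc_state *e, size_t ivsize)
{
    unmap_sg_stub(e->src_nents);                     /* source scatterlist */
    if (e->dst_nents)
        unmap_sg_stub(e->dst_nents);                 /* destination, if distinct */
    unmap_single_stub(e->iv_dma, ivsize);            /* the mapped IV */
    unmap_single_stub(e->qm_sg_dma, e->qm_sg_bytes); /* the SG table */
    if (e->has_assoclen)
        unmap_single_stub(e->assoclen_dma, 4);       /* line 1267: AEAD only */
}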
1287 struct aead_edesc *edesc = req_ctx->edesc; in aead_encrypt_done() local
1297 aead_unmap(ctx->dev, edesc, req); in aead_encrypt_done()
1298 qi_cache_free(edesc); in aead_encrypt_done()
1308 struct aead_edesc *edesc = req_ctx->edesc; in aead_decrypt_done() local
1318 aead_unmap(ctx->dev, edesc, req); in aead_decrypt_done()
1319 qi_cache_free(edesc); in aead_decrypt_done()
1325 struct aead_edesc *edesc; in aead_encrypt() local
1332 edesc = aead_edesc_alloc(req, true); in aead_encrypt()
1333 if (IS_ERR(edesc)) in aead_encrypt()
1334 return PTR_ERR(edesc); in aead_encrypt()
1340 caam_req->edesc = edesc; in aead_encrypt()
1344 aead_unmap(ctx->dev, edesc, req); in aead_encrypt()
1345 qi_cache_free(edesc); in aead_encrypt()
1353 struct aead_edesc *edesc; in aead_decrypt() local
1360 edesc = aead_edesc_alloc(req, false); in aead_decrypt()
1361 if (IS_ERR(edesc)) in aead_decrypt()
1362 return PTR_ERR(edesc); in aead_decrypt()
1368 caam_req->edesc = edesc; in aead_decrypt()
1372 aead_unmap(ctx->dev, edesc, req); in aead_decrypt()
1373 qi_cache_free(edesc); in aead_decrypt()
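
aead_encrypt() and aead_decrypt(), and the skcipher pair further down, share one submission shape: build the edesc, stash it in the per-request context so the completion callback can recover it (lines 1340/1368), enqueue to the hardware, and if the enqueue itself fails synchronously, unmap and free immediately since no callback will ever run (lines 1344..1345). A compact model of that control flow with stand-in helpers (enqueue_stub() and friends are hypothetical):

#include <errno.h>

struct req_ctx { void *edesc; void (*cb)(struct req_ctx *, int); };

static void *edesc_alloc_stub(void) { static int d; return &d; }
static void teardown_stub(void *e) { (void)e; }  /* unmap + cache free */
static int enqueue_stub(struct req_ctx *c, int fail) { (void)c; return fail ? -EIO : -EINPROGRESS; }
static void done_stub(struct req_ctx *c, int status) { teardown_stub(c->edesc); (void)status; }

static int submit_sketch(struct req_ctx *ctx, int fail)
{
    void *edesc = edesc_alloc_stub();
    if (!edesc)
        return -ENOMEM;

    ctx->edesc = edesc;    /* lines 1340/1368: callback retrieves it here */
    ctx->cb = done_stub;   /* lines 1287/1308: done handlers unmap + free */

    int ret = enqueue_stub(ctx, fail);
    if (ret != -EINPROGRESS)
        teardown_stub(edesc);  /* lines 1344..1345: synchronous failure path */
    return ret;
}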
1396 struct skcipher_edesc *edesc = req_ctx->edesc; in skcipher_encrypt_done() local
1407 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_encrypt_done()
1410 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_encrypt_done()
1412 skcipher_unmap(ctx->dev, edesc, req); in skcipher_encrypt_done()
1420 memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, in skcipher_encrypt_done()
1423 qi_cache_free(edesc); in skcipher_encrypt_done()
1434 struct skcipher_edesc *edesc = req_ctx->edesc; in skcipher_decrypt_done() local
1445 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_decrypt_done()
1448 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_decrypt_done()
1450 skcipher_unmap(ctx->dev, edesc, req); in skcipher_decrypt_done()
1458 memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, in skcipher_decrypt_done()
1461 qi_cache_free(edesc); in skcipher_decrypt_done()
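
Lines 1420 and 1458 show where the chained IV for a follow-on request comes from: the hardware deposits the last-block IV into the same qi_cache allocation, directly behind the scatter-gather table, so the done callbacks read it at (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes just before freeing the edesc. A layout sketch of that trailing-IV placement (sizes hypothetical):

#include <stdio.h>
#include <string.h>
#include <stdint.h>

#define QM_SG_BYTES 64  /* hypothetical size of the mapped SG table */
#define IVSIZE      16  /* e.g. AES block size */

int main(void)
{
    /* one allocation: SG table first, IV region right behind it */
    uint8_t blob[QM_SG_BYTES + IVSIZE];
    memset(blob, 0, sizeof(blob));
    memset(blob + QM_SG_BYTES, 0xAB, IVSIZE); /* hardware-written last-block IV */

    uint8_t req_iv[IVSIZE];
    /* lines 1420/1458: copy the trailing IV back for request chaining */
    memcpy(req_iv, blob + QM_SG_BYTES, IVSIZE);

    printf("chained IV byte: 0x%02x\n", req_iv[0]);
    return 0;
}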
1475 struct skcipher_edesc *edesc; in skcipher_encrypt() local
1504 edesc = skcipher_edesc_alloc(req); in skcipher_encrypt()
1505 if (IS_ERR(edesc)) in skcipher_encrypt()
1506 return PTR_ERR(edesc); in skcipher_encrypt()
1512 caam_req->edesc = edesc; in skcipher_encrypt()
1516 skcipher_unmap(ctx->dev, edesc, req); in skcipher_encrypt()
1517 qi_cache_free(edesc); in skcipher_encrypt()
1525 struct skcipher_edesc *edesc; in skcipher_decrypt() local
1554 edesc = skcipher_edesc_alloc(req); in skcipher_decrypt()
1555 if (IS_ERR(edesc)) in skcipher_decrypt()
1556 return PTR_ERR(edesc); in skcipher_decrypt()
1562 caam_req->edesc = edesc; in skcipher_decrypt()
1566 skcipher_unmap(ctx->dev, edesc, req); in skcipher_decrypt()
1567 qi_cache_free(edesc); in skcipher_decrypt()
3356 static inline void ahash_unmap(struct device *dev, struct ahash_edesc *edesc, in ahash_unmap() argument
3361 if (edesc->src_nents) in ahash_unmap()
3362 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
3364 if (edesc->qm_sg_bytes) in ahash_unmap()
3365 dma_unmap_single(dev, edesc->qm_sg_dma, edesc->qm_sg_bytes, in ahash_unmap()
3376 struct ahash_edesc *edesc, in ahash_unmap_ctx() argument
3385 ahash_unmap(dev, edesc, req); in ahash_unmap_ctx()
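
ahash_unmap() only undoes what was actually mapped: a zero src_nents or qm_sg_bytes in the edesc means the corresponding step was skipped at map time (lines 3361..3365), and ahash_unmap_ctx() layers the running-context unmap on top before delegating (line 3385). A state-guarded teardown sketch (stub names, not the kernel DMA API):

#include <stddef.h>

struct ahash_state { int src_nents; size_t qm_sg_bytes; unsigned long qm_sg_dma; };

static void unmap_sg_stub(int nents) { (void)nents; }
static void unmap_single_stub(unsigned long dma, size_t len) { (void)dma; (void)len; }

static void ahash_unmap_sketch(const struct ahash_state *e)
{
    if (e->src_nents)       /* line 3361: mapped only when src was used */
        unmap_sg_stub(e->src_nents);
    if (e->qm_sg_bytes)     /* line 3364: mapped only when an SG table was built */
        unmap_single_stub(e->qm_sg_dma, e->qm_sg_bytes);
}

static void ahash_unmap_ctx_sketch(const struct ahash_state *e,
                                   unsigned long ctx_dma, size_t ctx_len)
{
    if (ctx_dma)
        unmap_single_stub(ctx_dma, ctx_len); /* running-hash context buffer */
    ahash_unmap_sketch(e);                   /* line 3385: then the common part */
}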
3394 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done() local
3404 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_done()
3406 qi_cache_free(edesc); in ahash_done()
3421 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_bi() local
3430 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_done_bi()
3431 qi_cache_free(edesc); in ahash_done_bi()
3459 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_ctx_src() local
3469 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_done_ctx_src()
3471 qi_cache_free(edesc); in ahash_done_ctx_src()
3486 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_ctx_dst() local
3495 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_done_ctx_dst()
3496 qi_cache_free(edesc); in ahash_done_ctx_dst()
3533 struct ahash_edesc *edesc; in ahash_update_ctx() local
3561 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_update_ctx()
3562 if (!edesc) { in ahash_update_ctx()
3568 edesc->src_nents = src_nents; in ahash_update_ctx()
3572 sg_table = &edesc->sgt[0]; in ahash_update_ctx()
3591 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_ctx()
3593 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_ctx()
3598 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_ctx()
3603 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_ctx()
3613 req_ctx->edesc = edesc; in ahash_update_ctx()
3632 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_update_ctx()
3633 qi_cache_free(edesc); in ahash_update_ctx()
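
ahash_update_ctx() submits data only in whole block multiples; whatever is left over stays in a bounce buffer in the request state and is prepended on the next update. The split arithmetic is easy to model on its own (values hypothetical):

#include <stdio.h>

int main(void)
{
    unsigned int blocksize = 64;    /* e.g. SHA-256 block size */
    unsigned int buflen    = 20;    /* bytes carried over from last update */
    unsigned int nbytes    = 150;   /* bytes in this update request */

    unsigned int in_len = buflen + nbytes;
    /* keep the tail that does not fill a whole block... */
    unsigned int to_buf  = in_len & (blocksize - 1);
    /* ...and hash everything before it */
    unsigned int to_hash = in_len - to_buf;

    printf("to_hash=%u to_buf=%u\n", to_hash, to_buf); /* 128 and 42 */
    return 0;
}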
3650 struct ahash_edesc *edesc; in ahash_final_ctx() local
3655 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_final_ctx()
3656 if (!edesc) in ahash_final_ctx()
3660 sg_table = &edesc->sgt[0]; in ahash_final_ctx()
3673 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_final_ctx()
3675 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_final_ctx()
3680 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_final_ctx()
3685 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_final_ctx()
3695 req_ctx->edesc = edesc; in ahash_final_ctx()
3703 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_final_ctx()
3704 qi_cache_free(edesc); in ahash_final_ctx()
3722 struct ahash_edesc *edesc; in ahash_finup_ctx() local
3744 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_finup_ctx()
3745 if (!edesc) { in ahash_finup_ctx()
3750 edesc->src_nents = src_nents; in ahash_finup_ctx()
3754 sg_table = &edesc->sgt[0]; in ahash_finup_ctx()
3767 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_finup_ctx()
3769 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_finup_ctx()
3774 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_finup_ctx()
3779 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_finup_ctx()
3789 req_ctx->edesc = edesc; in ahash_finup_ctx()
3797 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_finup_ctx()
3798 qi_cache_free(edesc); in ahash_finup_ctx()
3814 struct ahash_edesc *edesc; in ahash_digest() local
3837 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_digest()
3838 if (!edesc) { in ahash_digest()
3843 edesc->src_nents = src_nents; in ahash_digest()
3848 struct dpaa2_sg_entry *sg_table = &edesc->sgt[0]; in ahash_digest()
3852 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_digest()
3854 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_digest()
3858 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_digest()
3860 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_digest()
3885 req_ctx->edesc = edesc; in ahash_digest()
3892 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_digest()
3893 qi_cache_free(edesc); in ahash_digest()
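
ahash_digest() builds an intermediate scatter-gather table only when the mapped source spans more than one segment (lines 3848..3860); a single contiguous segment is addressed directly, which is also why edesc->qm_sg_bytes can legitimately stay zero for the guard at line 3364. A sketch of that branch (field names hypothetical):

#include <stdio.h>

struct fle { unsigned long addr; int is_sg; };

static void set_input_sketch(struct fle *in, int mapped_nents,
                             unsigned long sg_table_dma, unsigned long seg_dma)
{
    if (mapped_nents > 1) {
        /* lines 3848..3860: several segments -> point at the mapped SG table */
        in->addr  = sg_table_dma;
        in->is_sg = 1;
    } else {
        /* one contiguous segment -> address it directly, no table needed */
        in->addr  = seg_dma;
        in->is_sg = 0;
    }
}

int main(void)
{
    struct fle in;
    set_input_sketch(&in, 3, 0x1000, 0x2000);
    printf("addr=0x%lx sg=%d\n", in.addr, in.is_sg);
    return 0;
}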
3910 struct ahash_edesc *edesc; in ahash_final_no_ctx() local
3914 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_final_no_ctx()
3915 if (!edesc) in ahash_final_no_ctx()
3957 req_ctx->edesc = edesc; in ahash_final_no_ctx()
3965 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_final_no_ctx()
3966 qi_cache_free(edesc); in ahash_final_no_ctx()
3985 struct ahash_edesc *edesc; in ahash_update_no_ctx() local
4013 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_update_no_ctx()
4014 if (!edesc) { in ahash_update_no_ctx()
4020 edesc->src_nents = src_nents; in ahash_update_no_ctx()
4023 sg_table = &edesc->sgt[0]; in ahash_update_no_ctx()
4031 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_no_ctx()
4033 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_no_ctx()
4038 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_no_ctx()
4053 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_no_ctx()
4063 req_ctx->edesc = edesc; in ahash_update_no_ctx()
4086 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE); in ahash_update_no_ctx()
4087 qi_cache_free(edesc); in ahash_update_no_ctx()
4104 struct ahash_edesc *edesc; in ahash_finup_no_ctx() local
4126 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_finup_no_ctx()
4127 if (!edesc) { in ahash_finup_no_ctx()
4132 edesc->src_nents = src_nents; in ahash_finup_no_ctx()
4134 sg_table = &edesc->sgt[0]; in ahash_finup_no_ctx()
4142 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_finup_no_ctx()
4144 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_finup_no_ctx()
4149 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_finup_no_ctx()
4164 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_finup_no_ctx()
4174 req_ctx->edesc = edesc; in ahash_finup_no_ctx()
4182 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_finup_no_ctx()
4183 qi_cache_free(edesc); in ahash_finup_no_ctx()
4202 struct ahash_edesc *edesc; in ahash_update_first() local
4231 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_update_first()
4232 if (!edesc) { in ahash_update_first()
4238 edesc->src_nents = src_nents; in ahash_update_first()
4239 sg_table = &edesc->sgt[0]; in ahash_update_first()
4251 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_first()
4254 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_first()
4259 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_first()
4261 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_first()
4285 req_ctx->edesc = edesc; in ahash_update_first()
4311 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE); in ahash_update_first()
4312 qi_cache_free(edesc); in ahash_update_first()