Lines Matching refs:edesc
361 struct aead_edesc *edesc; in aead_edesc_alloc() local
370 edesc = qi_cache_zalloc(GFP_DMA | flags); in aead_edesc_alloc()
371 if (unlikely(!edesc)) { in aead_edesc_alloc()
384 qi_cache_free(edesc); in aead_edesc_alloc()
392 qi_cache_free(edesc); in aead_edesc_alloc()
401 qi_cache_free(edesc); in aead_edesc_alloc()
415 qi_cache_free(edesc); in aead_edesc_alloc()
429 qi_cache_free(edesc); in aead_edesc_alloc()
437 qi_cache_free(edesc); in aead_edesc_alloc()
467 sg_table = &edesc->sgt[0]; in aead_edesc_alloc()
475 qi_cache_free(edesc); in aead_edesc_alloc()
490 qi_cache_free(edesc); in aead_edesc_alloc()
495 edesc->src_nents = src_nents; in aead_edesc_alloc()
496 edesc->dst_nents = dst_nents; in aead_edesc_alloc()
497 edesc->iv_dma = iv_dma; in aead_edesc_alloc()
505 edesc->assoclen = cpu_to_caam32(req->assoclen - ivsize); in aead_edesc_alloc()
507 edesc->assoclen = cpu_to_caam32(req->assoclen); in aead_edesc_alloc()
508 edesc->assoclen_dma = dma_map_single(dev, &edesc->assoclen, 4, in aead_edesc_alloc()
510 if (dma_mapping_error(dev, edesc->assoclen_dma)) { in aead_edesc_alloc()
514 qi_cache_free(edesc); in aead_edesc_alloc()
518 dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0); in aead_edesc_alloc()
533 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_edesc_alloc()
536 qi_cache_free(edesc); in aead_edesc_alloc()
540 edesc->qm_sg_dma = qm_sg_dma; in aead_edesc_alloc()
541 edesc->qm_sg_bytes = qm_sg_bytes; in aead_edesc_alloc()
582 return edesc; in aead_edesc_alloc()
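
The references above trace the allocation half of the aead extended-descriptor lifecycle: the descriptor comes zeroed out of the QI cache, every intermediate failure (lines 384-536) hands it back with qi_cache_free(), and the 4-byte assoclen is DMA-mapped in place inside the edesc itself (lines 505-518). A minimal sketch of that pattern follows, assuming a simplified struct layout; only the identifiers visible in the listing (qi_cache_zalloc, qi_cache_free, cpu_to_caam32, dma_map_single, dma_mapping_error) come from the excerpt, everything else is illustrative.

/*
 * Sketch of the alloc/error-unwind pattern in aead_edesc_alloc().
 * The real function maps src/dst S/G lists and the IV between the
 * allocation and the assoclen mapping; each of those steps unwinds
 * with qi_cache_free(edesc) on failure, as lines 384-437 show.
 */
static struct aead_edesc *aead_edesc_alloc_sketch(struct device *dev,
                                                  struct aead_request *req)
{
        gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
                      GFP_KERNEL : GFP_ATOMIC;
        struct aead_edesc *edesc;

        edesc = qi_cache_zalloc(GFP_DMA | flags);
        if (unlikely(!edesc))
                return ERR_PTR(-ENOMEM);

        /* assoclen is stored in, and mapped from, the edesc itself */
        edesc->assoclen = cpu_to_caam32(req->assoclen);
        edesc->assoclen_dma = dma_map_single(dev, &edesc->assoclen, 4,
                                             DMA_TO_DEVICE);
        if (dma_mapping_error(dev, edesc->assoclen_dma)) {
                qi_cache_free(edesc);
                return ERR_PTR(-ENOMEM);
        }

        return edesc;
}
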
1117 struct skcipher_edesc *edesc; in skcipher_edesc_alloc() local
1189 edesc = qi_cache_zalloc(GFP_DMA | flags); in skcipher_edesc_alloc()
1190 if (unlikely(!edesc)) { in skcipher_edesc_alloc()
1198 sg_table = &edesc->sgt[0]; in skcipher_edesc_alloc()
1207 qi_cache_free(edesc); in skcipher_edesc_alloc()
1211 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1212 edesc->dst_nents = dst_nents; in skcipher_edesc_alloc()
1213 edesc->iv_dma = iv_dma; in skcipher_edesc_alloc()
1214 edesc->qm_sg_bytes = qm_sg_bytes; in skcipher_edesc_alloc()
1225 edesc->qm_sg_dma = dma_map_single(dev, sg_table, edesc->qm_sg_bytes, in skcipher_edesc_alloc()
1227 if (dma_mapping_error(dev, edesc->qm_sg_dma)) { in skcipher_edesc_alloc()
1231 qi_cache_free(edesc); in skcipher_edesc_alloc()
1241 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in skcipher_edesc_alloc()
1246 dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma + in skcipher_edesc_alloc()
1249 dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma + dst_sg_idx * in skcipher_edesc_alloc()
1252 return edesc; in skcipher_edesc_alloc()
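
The skcipher allocator ends by pointing both DPAA2 frame-list entries into the single DMA-mapped S/G table (lines 1241-1249): the input entry at the start of the mapping, the output entry offset by a number of S/G entries. A sketch of that wiring; in_fle, out_fle, sg_table and dst_sg_idx are local names assumed from the surrounding function.

        /* both frame-list entries reference the one mapped S/G table */
        dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma);
        dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma +
                                   dst_sg_idx * sizeof(*sg_table));
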
1255 static void aead_unmap(struct device *dev, struct aead_edesc *edesc, in aead_unmap() argument
1261 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
1262 edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma, in aead_unmap()
1263 edesc->qm_sg_bytes); in aead_unmap()
1264 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_unmap()
1267 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, in skcipher_unmap() argument
1273 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
1274 edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma, in skcipher_unmap()
1275 edesc->qm_sg_bytes); in skcipher_unmap()
1284 struct aead_edesc *edesc = req_ctx->edesc; in aead_encrypt_done() local
1294 aead_unmap(ctx->dev, edesc, req); in aead_encrypt_done()
1295 qi_cache_free(edesc); in aead_encrypt_done()
1305 struct aead_edesc *edesc = req_ctx->edesc; in aead_decrypt_done() local
1315 aead_unmap(ctx->dev, edesc, req); in aead_decrypt_done()
1316 qi_cache_free(edesc); in aead_decrypt_done()
1322 struct aead_edesc *edesc; in aead_encrypt() local
1329 edesc = aead_edesc_alloc(req, true); in aead_encrypt()
1330 if (IS_ERR(edesc)) in aead_encrypt()
1331 return PTR_ERR(edesc); in aead_encrypt()
1337 caam_req->edesc = edesc; in aead_encrypt()
1341 aead_unmap(ctx->dev, edesc, req); in aead_encrypt()
1342 qi_cache_free(edesc); in aead_encrypt()
1350 struct aead_edesc *edesc; in aead_decrypt() local
1357 edesc = aead_edesc_alloc(req, false); in aead_decrypt()
1358 if (IS_ERR(edesc)) in aead_decrypt()
1359 return PTR_ERR(edesc); in aead_decrypt()
1365 caam_req->edesc = edesc; in aead_decrypt()
1369 aead_unmap(ctx->dev, edesc, req); in aead_decrypt()
1370 qi_cache_free(edesc); in aead_decrypt()
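
Lines 1322-1370 show the submission handoff: once the edesc is stashed in caam_req->edesc and the request is enqueued, ownership passes to the completion callback, so the caller cleans up only when the enqueue itself fails. A sketch of that contract; the enqueue helper's name and the exact set of accepted return codes are assumptions here.

        edesc = aead_edesc_alloc(req, true /* encrypt */);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);

        caam_req->edesc = edesc;
        ret = dpaa2_caam_enqueue(ctx->dev, caam_req); /* assumed helper */
        if (ret != -EINPROGRESS) {
                /* enqueue failed: the callback will never run, so the
                 * submitter must unmap and free the descriptor itself */
                aead_unmap(ctx->dev, edesc, req);
                qi_cache_free(edesc);
        }
        return ret;
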
1393 struct skcipher_edesc *edesc = req_ctx->edesc; in skcipher_encrypt_done() local
1404 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_encrypt_done()
1407 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_encrypt_done()
1409 skcipher_unmap(ctx->dev, edesc, req); in skcipher_encrypt_done()
1417 memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, in skcipher_encrypt_done()
1420 qi_cache_free(edesc); in skcipher_encrypt_done()
1431 struct skcipher_edesc *edesc = req_ctx->edesc; in skcipher_decrypt_done() local
1442 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_decrypt_done()
1445 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_decrypt_done()
1447 skcipher_unmap(ctx->dev, edesc, req); in skcipher_decrypt_done()
1455 memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, in skcipher_decrypt_done()
1458 qi_cache_free(edesc); in skcipher_decrypt_done()
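
The skcipher completion path explains why skcipher_unmap() passes DMA_BIDIRECTIONAL for the IV (line 1274) where the aead path uses DMA_TO_DEVICE (line 1262): the hardware writes the next-chain IV back, and lines 1417/1455 copy it out of the slot just past the S/G table, in the same qi_cache allocation, before the edesc is freed. Sketched, assuming ivsize comes from crypto_skcipher_ivsize():

        skcipher_unmap(ctx->dev, edesc, req);

        /* the IV sits immediately after the S/G table in the same
         * qi_cache block, so it is still valid at this point */
        memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, ivsize);
        qi_cache_free(edesc);
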
1472 struct skcipher_edesc *edesc; in skcipher_encrypt() local
1501 edesc = skcipher_edesc_alloc(req); in skcipher_encrypt()
1502 if (IS_ERR(edesc)) in skcipher_encrypt()
1503 return PTR_ERR(edesc); in skcipher_encrypt()
1509 caam_req->edesc = edesc; in skcipher_encrypt()
1513 skcipher_unmap(ctx->dev, edesc, req); in skcipher_encrypt()
1514 qi_cache_free(edesc); in skcipher_encrypt()
1522 struct skcipher_edesc *edesc; in skcipher_decrypt() local
1551 edesc = skcipher_edesc_alloc(req); in skcipher_decrypt()
1552 if (IS_ERR(edesc)) in skcipher_decrypt()
1553 return PTR_ERR(edesc); in skcipher_decrypt()
1559 caam_req->edesc = edesc; in skcipher_decrypt()
1563 skcipher_unmap(ctx->dev, edesc, req); in skcipher_decrypt()
1564 qi_cache_free(edesc); in skcipher_decrypt()
3352 static inline void ahash_unmap(struct device *dev, struct ahash_edesc *edesc, in ahash_unmap() argument
3357 if (edesc->src_nents) in ahash_unmap()
3358 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
3360 if (edesc->qm_sg_bytes) in ahash_unmap()
3361 dma_unmap_single(dev, edesc->qm_sg_dma, edesc->qm_sg_bytes, in ahash_unmap()
3372 struct ahash_edesc *edesc, in ahash_unmap_ctx() argument
3381 ahash_unmap(dev, edesc, req); in ahash_unmap_ctx()
3390 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done() local
3400 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_done()
3402 qi_cache_free(edesc); in ahash_done()
3417 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_bi() local
3426 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_done_bi()
3427 qi_cache_free(edesc); in ahash_done_bi()
3455 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_ctx_src() local
3465 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_done_ctx_src()
3467 qi_cache_free(edesc); in ahash_done_ctx_src()
3482 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_ctx_dst() local
3491 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_done_ctx_dst()
3492 qi_cache_free(edesc); in ahash_done_ctx_dst()
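
The four ahash completion callbacks differ only in the DMA direction they pass down: DMA_FROM_DEVICE where the hash context was only written by hardware (ahash_done, ahash_done_ctx_dst, lines 3400/3491) and DMA_BIDIRECTIONAL where the running state was read and rewritten (ahash_done_bi, ahash_done_ctx_src, lines 3426/3465). A sketch of that choice factored into one helper; the helper and its flag are hypothetical, the real callbacks simply hardcode the direction.

static void ahash_done_common(struct device *dev, struct ahash_edesc *edesc,
                              struct ahash_request *req, bool ctx_updated)
{
        /* direction mirrors how the operation used the hash context */
        ahash_unmap_ctx(dev, edesc, req,
                        ctx_updated ? DMA_BIDIRECTIONAL : DMA_FROM_DEVICE);
        qi_cache_free(edesc);
}
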
3529 struct ahash_edesc *edesc; in ahash_update_ctx() local
3557 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_update_ctx()
3558 if (!edesc) { in ahash_update_ctx()
3564 edesc->src_nents = src_nents; in ahash_update_ctx()
3568 sg_table = &edesc->sgt[0]; in ahash_update_ctx()
3587 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_ctx()
3589 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_ctx()
3594 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_ctx()
3599 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_ctx()
3609 req_ctx->edesc = edesc; in ahash_update_ctx()
3628 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_update_ctx()
3629 qi_cache_free(edesc); in ahash_update_ctx()
3646 struct ahash_edesc *edesc; in ahash_final_ctx() local
3651 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_final_ctx()
3652 if (!edesc) in ahash_final_ctx()
3656 sg_table = &edesc->sgt[0]; in ahash_final_ctx()
3669 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_final_ctx()
3671 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_final_ctx()
3676 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_final_ctx()
3681 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_final_ctx()
3691 req_ctx->edesc = edesc; in ahash_final_ctx()
3699 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_final_ctx()
3700 qi_cache_free(edesc); in ahash_final_ctx()
3718 struct ahash_edesc *edesc; in ahash_finup_ctx() local
3740 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_finup_ctx()
3741 if (!edesc) { in ahash_finup_ctx()
3746 edesc->src_nents = src_nents; in ahash_finup_ctx()
3750 sg_table = &edesc->sgt[0]; in ahash_finup_ctx()
3763 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_finup_ctx()
3765 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_finup_ctx()
3770 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_finup_ctx()
3775 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_finup_ctx()
3785 req_ctx->edesc = edesc; in ahash_finup_ctx()
3793 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_finup_ctx()
3794 qi_cache_free(edesc); in ahash_finup_ctx()
3810 struct ahash_edesc *edesc; in ahash_digest() local
3833 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_digest()
3834 if (!edesc) { in ahash_digest()
3839 edesc->src_nents = src_nents; in ahash_digest()
3844 struct dpaa2_sg_entry *sg_table = &edesc->sgt[0]; in ahash_digest()
3848 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_digest()
3850 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_digest()
3854 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_digest()
3856 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_digest()
3881 req_ctx->edesc = edesc; in ahash_digest()
3888 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_digest()
3889 qi_cache_free(edesc); in ahash_digest()
3906 struct ahash_edesc *edesc; in ahash_final_no_ctx() local
3910 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_final_no_ctx()
3911 if (!edesc) in ahash_final_no_ctx()
3953 req_ctx->edesc = edesc; in ahash_final_no_ctx()
3961 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_final_no_ctx()
3962 qi_cache_free(edesc); in ahash_final_no_ctx()
3981 struct ahash_edesc *edesc; in ahash_update_no_ctx() local
4009 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_update_no_ctx()
4010 if (!edesc) { in ahash_update_no_ctx()
4016 edesc->src_nents = src_nents; in ahash_update_no_ctx()
4019 sg_table = &edesc->sgt[0]; in ahash_update_no_ctx()
4027 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_no_ctx()
4029 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_no_ctx()
4034 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_no_ctx()
4049 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_no_ctx()
4059 req_ctx->edesc = edesc; in ahash_update_no_ctx()
4082 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE); in ahash_update_no_ctx()
4083 qi_cache_free(edesc); in ahash_update_no_ctx()
4100 struct ahash_edesc *edesc; in ahash_finup_no_ctx() local
4122 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_finup_no_ctx()
4123 if (!edesc) { in ahash_finup_no_ctx()
4128 edesc->src_nents = src_nents; in ahash_finup_no_ctx()
4130 sg_table = &edesc->sgt[0]; in ahash_finup_no_ctx()
4138 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_finup_no_ctx()
4140 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_finup_no_ctx()
4145 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_finup_no_ctx()
4160 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_finup_no_ctx()
4170 req_ctx->edesc = edesc; in ahash_finup_no_ctx()
4178 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_finup_no_ctx()
4179 qi_cache_free(edesc); in ahash_finup_no_ctx()
4198 struct ahash_edesc *edesc; in ahash_update_first() local
4227 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_update_first()
4228 if (!edesc) { in ahash_update_first()
4234 edesc->src_nents = src_nents; in ahash_update_first()
4235 sg_table = &edesc->sgt[0]; in ahash_update_first()
4247 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_first()
4250 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_first()
4255 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_first()
4257 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_first()
4281 req_ctx->edesc = edesc; in ahash_update_first()
4307 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE); in ahash_update_first()
4308 qi_cache_free(edesc); in ahash_update_first()
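
Across all three algorithm families the same storage trick recurs: sg_table = &edesc->sgt[0] (lines 467, 1198, 3568, 4019, ...), because each extended descriptor ends in a flexible S/G array carved from the same qi_cache allocation, so a single qi_cache_free() releases the descriptor, its S/G table, and (for skcipher) the trailing IV together. An assumed shape, modeled on the ahash references above; the real structs carry additional fields.

/* assumed layout, for illustration only */
struct ahash_edesc {
        dma_addr_t qm_sg_dma;           /* DMA address of sgt[] */
        int src_nents;                  /* mapped source entries, 0 if none */
        int qm_sg_bytes;                /* bytes mapped at qm_sg_dma */
        struct dpaa2_sg_entry sgt[];    /* S/G table, same allocation */
};
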