Lines Matching refs:edesc

362 struct aead_edesc *edesc; in aead_edesc_alloc() local
371 edesc = qi_cache_zalloc(GFP_DMA | flags); in aead_edesc_alloc()
372 if (unlikely(!edesc)) { in aead_edesc_alloc()
385 qi_cache_free(edesc); in aead_edesc_alloc()
393 qi_cache_free(edesc); in aead_edesc_alloc()
402 qi_cache_free(edesc); in aead_edesc_alloc()
416 qi_cache_free(edesc); in aead_edesc_alloc()
430 qi_cache_free(edesc); in aead_edesc_alloc()
438 qi_cache_free(edesc); in aead_edesc_alloc()
468 sg_table = &edesc->sgt[0]; in aead_edesc_alloc()
476 qi_cache_free(edesc); in aead_edesc_alloc()
491 qi_cache_free(edesc); in aead_edesc_alloc()
496 edesc->src_nents = src_nents; in aead_edesc_alloc()
497 edesc->dst_nents = dst_nents; in aead_edesc_alloc()
498 edesc->iv_dma = iv_dma; in aead_edesc_alloc()
506 edesc->assoclen = cpu_to_caam32(req->assoclen - ivsize); in aead_edesc_alloc()
508 edesc->assoclen = cpu_to_caam32(req->assoclen); in aead_edesc_alloc()
509 edesc->assoclen_dma = dma_map_single(dev, &edesc->assoclen, 4, in aead_edesc_alloc()
511 if (dma_mapping_error(dev, edesc->assoclen_dma)) { in aead_edesc_alloc()
515 qi_cache_free(edesc); in aead_edesc_alloc()
519 dma_to_qm_sg_one(sg_table, edesc->assoclen_dma, 4, 0); in aead_edesc_alloc()
534 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_edesc_alloc()
537 qi_cache_free(edesc); in aead_edesc_alloc()
541 edesc->qm_sg_dma = qm_sg_dma; in aead_edesc_alloc()
542 edesc->qm_sg_bytes = qm_sg_bytes; in aead_edesc_alloc()
583 return edesc; in aead_edesc_alloc()
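
The aead_edesc_alloc() references above all follow one shape: the extended descriptor comes from the QI cache via qi_cache_zalloc(), the 4-byte assoclen word and the S/G table are DMA-mapped, and every mapping failure returns the descriptor with qi_cache_free() before the function exits, so callers only ever see a fully mapped edesc or an error pointer. Below is a minimal standalone sketch of that allocate-and-unwind pattern; cache_zalloc(), cache_free(), map_one() and unmap_one() are hypothetical stand-ins, not the real CAAM or DMA API.

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-ins for qi_cache_zalloc()/qi_cache_free() and the
 * DMA mapping helpers; the real driver maps against a struct device. */
struct edesc {
	unsigned int assoclen;
	void *assoclen_map;	/* models edesc->assoclen_dma */
	void *sg_map;		/* models edesc->qm_sg_dma    */
};

static struct edesc *cache_zalloc(void) { return calloc(1, sizeof(struct edesc)); }
static void cache_free(struct edesc *e) { free(e); }
static void *map_one(void *cpu, size_t len) { (void)len; return cpu; /* NULL on error */ }
static void unmap_one(void *m) { (void)m; }

static struct edesc *edesc_alloc_sketch(unsigned int assoclen)
{
	struct edesc *edesc = cache_zalloc();

	if (!edesc)
		return NULL;

	/* Map the 4-byte assoclen word, as lines 506-515 do. */
	edesc->assoclen = assoclen;
	edesc->assoclen_map = map_one(&edesc->assoclen, 4);
	if (!edesc->assoclen_map) {
		cache_free(edesc);		/* mirrors line 515 */
		return NULL;
	}

	/* Map the S/G table; on failure undo the earlier mapping first,
	 * then free, as lines 534-537 do. */
	edesc->sg_map = map_one(edesc, sizeof(*edesc));
	if (!edesc->sg_map) {
		unmap_one(edesc->assoclen_map);
		cache_free(edesc);
		return NULL;
	}

	return edesc;				/* mirrors line 583 */
}

int main(void)
{
	struct edesc *e = edesc_alloc_sketch(16);

	printf("alloc %s\n", e ? "ok" : "failed");
	cache_free(e);
	return 0;
}
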
1127 struct skcipher_edesc *edesc; in skcipher_edesc_alloc() local
1199 edesc = qi_cache_zalloc(GFP_DMA | flags); in skcipher_edesc_alloc()
1200 if (unlikely(!edesc)) { in skcipher_edesc_alloc()
1208 sg_table = &edesc->sgt[0]; in skcipher_edesc_alloc()
1217 qi_cache_free(edesc); in skcipher_edesc_alloc()
1221 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1222 edesc->dst_nents = dst_nents; in skcipher_edesc_alloc()
1223 edesc->iv_dma = iv_dma; in skcipher_edesc_alloc()
1224 edesc->qm_sg_bytes = qm_sg_bytes; in skcipher_edesc_alloc()
1235 edesc->qm_sg_dma = dma_map_single(dev, sg_table, edesc->qm_sg_bytes, in skcipher_edesc_alloc()
1237 if (dma_mapping_error(dev, edesc->qm_sg_dma)) { in skcipher_edesc_alloc()
1241 qi_cache_free(edesc); in skcipher_edesc_alloc()
1251 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in skcipher_edesc_alloc()
1256 dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma + in skcipher_edesc_alloc()
1259 dpaa2_fl_set_addr(out_fle, edesc->qm_sg_dma + dst_sg_idx * in skcipher_edesc_alloc()
1262 return edesc; in skcipher_edesc_alloc()
1265 static void aead_unmap(struct device *dev, struct aead_edesc *edesc, in aead_unmap() argument
1271 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in aead_unmap()
1272 edesc->iv_dma, ivsize, DMA_TO_DEVICE, edesc->qm_sg_dma, in aead_unmap()
1273 edesc->qm_sg_bytes); in aead_unmap()
1274 dma_unmap_single(dev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_unmap()
1277 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, in skcipher_unmap() argument
1283 caam_unmap(dev, req->src, req->dst, edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
1284 edesc->iv_dma, ivsize, DMA_BIDIRECTIONAL, edesc->qm_sg_dma, in skcipher_unmap()
1285 edesc->qm_sg_bytes); in skcipher_unmap()
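
aead_unmap() and skcipher_unmap() are the teardown halves of the two allocators: both hand the scatterlists, the IV and the S/G table to caam_unmap(), and aead_unmap() additionally unmaps the 4-byte assoclen word that aead_edesc_alloc() mapped (line 1274). A small sketch of that shared-plus-extra teardown; unmap_region() and the struct fields are hypothetical stand-ins for the real helpers.

#include <stddef.h>
#include <stdio.h>

/* Hypothetical fields mirroring the ones the unmap helpers touch. */
struct edesc {
	void *iv_map;		/* edesc->iv_dma      */
	void *sg_map;		/* edesc->qm_sg_dma   */
	size_t sg_bytes;	/* edesc->qm_sg_bytes */
	void *assoclen_map;	/* edesc->assoclen_dma (AEAD only) */
};

static void unmap_region(const char *what, void *m, size_t len)
{
	printf("unmap %s (%p, %zu bytes)\n", what, m, len);
}

/* Common part, standing in for caam_unmap(). */
static void common_unmap(struct edesc *e, size_t ivsize)
{
	unmap_region("iv", e->iv_map, ivsize);
	unmap_region("sg table", e->sg_map, e->sg_bytes);
}

static void aead_unmap_sketch(struct edesc *e, size_t ivsize)
{
	common_unmap(e, ivsize);
	unmap_region("assoclen", e->assoclen_map, 4);	/* extra AEAD step */
}

static void skcipher_unmap_sketch(struct edesc *e, size_t ivsize)
{
	common_unmap(e, ivsize);	/* IV is mapped bidirectionally in the real code */
}

int main(void)
{
	struct edesc e = { 0 };

	aead_unmap_sketch(&e, 16);
	skcipher_unmap_sketch(&e, 16);
	return 0;
}
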
1294 struct aead_edesc *edesc = req_ctx->edesc; in aead_encrypt_done() local
1304 aead_unmap(ctx->dev, edesc, req); in aead_encrypt_done()
1305 qi_cache_free(edesc); in aead_encrypt_done()
1315 struct aead_edesc *edesc = req_ctx->edesc; in aead_decrypt_done() local
1325 aead_unmap(ctx->dev, edesc, req); in aead_decrypt_done()
1326 qi_cache_free(edesc); in aead_decrypt_done()
1332 struct aead_edesc *edesc; in aead_encrypt() local
1339 edesc = aead_edesc_alloc(req, true); in aead_encrypt()
1340 if (IS_ERR(edesc)) in aead_encrypt()
1341 return PTR_ERR(edesc); in aead_encrypt()
1347 caam_req->edesc = edesc; in aead_encrypt()
1351 aead_unmap(ctx->dev, edesc, req); in aead_encrypt()
1352 qi_cache_free(edesc); in aead_encrypt()
1360 struct aead_edesc *edesc; in aead_decrypt() local
1367 edesc = aead_edesc_alloc(req, false); in aead_decrypt()
1368 if (IS_ERR(edesc)) in aead_decrypt()
1369 return PTR_ERR(edesc); in aead_decrypt()
1375 caam_req->edesc = edesc; in aead_decrypt()
1379 aead_unmap(ctx->dev, edesc, req); in aead_decrypt()
1380 qi_cache_free(edesc); in aead_decrypt()
1403 struct skcipher_edesc *edesc = req_ctx->edesc; in skcipher_encrypt_done() local
1414 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_encrypt_done()
1417 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_encrypt_done()
1419 skcipher_unmap(ctx->dev, edesc, req); in skcipher_encrypt_done()
1427 memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, in skcipher_encrypt_done()
1430 qi_cache_free(edesc); in skcipher_encrypt_done()
1441 struct skcipher_edesc *edesc = req_ctx->edesc; in skcipher_decrypt_done() local
1452 edesc->src_nents > 1 ? 100 : ivsize, 1); in skcipher_decrypt_done()
1455 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_decrypt_done()
1457 skcipher_unmap(ctx->dev, edesc, req); in skcipher_decrypt_done()
1465 memcpy(req->iv, (u8 *)&edesc->sgt[0] + edesc->qm_sg_bytes, in skcipher_decrypt_done()
1468 qi_cache_free(edesc); in skcipher_decrypt_done()
1474 struct skcipher_edesc *edesc; in skcipher_encrypt() local
1484 edesc = skcipher_edesc_alloc(req); in skcipher_encrypt()
1485 if (IS_ERR(edesc)) in skcipher_encrypt()
1486 return PTR_ERR(edesc); in skcipher_encrypt()
1492 caam_req->edesc = edesc; in skcipher_encrypt()
1496 skcipher_unmap(ctx->dev, edesc, req); in skcipher_encrypt()
1497 qi_cache_free(edesc); in skcipher_encrypt()
1505 struct skcipher_edesc *edesc; in skcipher_decrypt() local
1514 edesc = skcipher_edesc_alloc(req); in skcipher_decrypt()
1515 if (IS_ERR(edesc)) in skcipher_decrypt()
1516 return PTR_ERR(edesc); in skcipher_decrypt()
1522 caam_req->edesc = edesc; in skcipher_decrypt()
1526 skcipher_unmap(ctx->dev, edesc, req); in skcipher_decrypt()
1527 qi_cache_free(edesc); in skcipher_decrypt()
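
The aead and skcipher encrypt/decrypt entry points share the same submit shape: allocate the edesc, record it in the per-request caam_request so the completion callback can find it, enqueue, and on a synchronous error unmap and free immediately (lines 1351-1352, 1379-1380, 1496-1497, 1526-1527; the enqueue call itself does not reference edesc and so is not listed). A sketch of that flow with hypothetical stand-ins; the real code enqueues through the DPAA2 QI layer, which is not modelled here.

#include <errno.h>
#include <stdio.h>
#include <stdlib.h>

struct edesc { int mapped; };

/* Hypothetical per-request context, modelling caam_request::edesc. */
struct caam_req_sketch {
	struct edesc *edesc;
};

static struct edesc *edesc_alloc(void) { return calloc(1, sizeof(struct edesc)); }
static void edesc_unmap(struct edesc *e) { e->mapped = 0; }
static void edesc_free(struct edesc *e) { free(e); }

/* Stand-in for the hardware enqueue; returns -EINPROGRESS on success,
 * a negative error code on failure. */
static int enqueue(struct caam_req_sketch *req) { (void)req; return -EINPROGRESS; }

static int encrypt_sketch(struct caam_req_sketch *req)
{
	struct edesc *edesc = edesc_alloc();
	int ret;

	if (!edesc)
		return -ENOMEM;

	req->edesc = edesc;	/* mirrors caam_req->edesc = edesc */
	ret = enqueue(req);
	if (ret != -EINPROGRESS) {
		/* Synchronous failure: unwind here; otherwise the
		 * completion callback does the unmap and free. */
		edesc_unmap(edesc);
		edesc_free(edesc);
	}
	return ret;
}

int main(void)
{
	struct caam_req_sketch req = { 0 };
	int ret = encrypt_sketch(&req);

	printf("submit: %d\n", ret);
	if (ret == -EINPROGRESS)
		edesc_free(req.edesc);	/* normally done by the completion path */
	return 0;
}
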
3311 static inline void ahash_unmap(struct device *dev, struct ahash_edesc *edesc, in ahash_unmap() argument
3316 if (edesc->src_nents) in ahash_unmap()
3317 dma_unmap_sg(dev, req->src, edesc->src_nents, DMA_TO_DEVICE); in ahash_unmap()
3319 if (edesc->qm_sg_bytes) in ahash_unmap()
3320 dma_unmap_single(dev, edesc->qm_sg_dma, edesc->qm_sg_bytes, in ahash_unmap()
3331 struct ahash_edesc *edesc, in ahash_unmap_ctx() argument
3340 ahash_unmap(dev, edesc, req); in ahash_unmap_ctx()
3349 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done() local
3359 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_done()
3361 qi_cache_free(edesc); in ahash_done()
3376 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_bi() local
3385 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_done_bi()
3387 qi_cache_free(edesc); in ahash_done_bi()
3406 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_ctx_src() local
3416 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_done_ctx_src()
3418 qi_cache_free(edesc); in ahash_done_ctx_src()
3433 struct ahash_edesc *edesc = state->caam_req.edesc; in ahash_done_ctx_dst() local
3442 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_done_ctx_dst()
3444 qi_cache_free(edesc); in ahash_done_ctx_dst()
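
All four ahash completion callbacks follow the same pattern as the aead/skcipher ones: recover the edesc stashed in the request context, unmap it with the direction that matches how the hash context was mapped (DMA_FROM_DEVICE or DMA_BIDIRECTIONAL), and return it to the QI cache. A sketch of that callback shape; the types, direction enum and complete() hook are hypothetical stand-ins.

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical DMA directions, standing in for DMA_FROM_DEVICE etc. */
enum dir_sketch { FROM_DEVICE, BIDIRECTIONAL };

struct edesc { int sg_bytes; };

struct req_ctx_sketch {
	struct edesc *edesc;
	void (*complete)(int status);
};

static void unmap_ctx(struct edesc *e, enum dir_sketch dir)
{
	/* The real ahash_unmap_ctx() unmaps the S/G table, the source
	 * scatterlist and the hash state with the given direction. */
	(void)e; (void)dir;
}

static void cache_free(struct edesc *e) { free(e); }

/* Shape shared by ahash_done(), ahash_done_bi(), ahash_done_ctx_src()
 * and ahash_done_ctx_dst(): unmap, free, then complete the request. */
static void done_sketch(struct req_ctx_sketch *ctx, enum dir_sketch dir, int status)
{
	struct edesc *edesc = ctx->edesc;

	unmap_ctx(edesc, dir);
	cache_free(edesc);
	ctx->complete(status);
}

static void report(int status) { printf("request done: %d\n", status); }

int main(void)
{
	struct req_ctx_sketch ctx = { calloc(1, sizeof(struct edesc)), report };

	done_sketch(&ctx, FROM_DEVICE, 0);
	return 0;
}
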
3473 struct ahash_edesc *edesc; in ahash_update_ctx() local
3502 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_update_ctx()
3503 if (!edesc) { in ahash_update_ctx()
3509 edesc->src_nents = src_nents; in ahash_update_ctx()
3513 sg_table = &edesc->sgt[0]; in ahash_update_ctx()
3536 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_ctx()
3538 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_ctx()
3543 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_ctx()
3548 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_ctx()
3558 req_ctx->edesc = edesc; in ahash_update_ctx()
3580 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_update_ctx()
3581 qi_cache_free(edesc); in ahash_update_ctx()
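
ahash_update_ctx() (and the ahash allocators that follow) reuse the layout the aead/skcipher allocators rely on: the DPAA2 S/G table lives directly behind the edesc in the same cache allocation (sg_table = &edesc->sgt[0]), and only that table is handed to dma_map_single(), with its size kept in qm_sg_bytes. A sketch of that single-allocation layout using a C flexible array member; the entry type and sizes are hypothetical stand-ins for struct dpaa2_sg_entry.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-in for struct dpaa2_sg_entry. */
struct sg_entry_sketch {
	uint64_t addr;
	uint32_t len;
	uint32_t flags;
};

/* Descriptor and S/G table share one allocation, as in the driver,
 * where sg_table = &edesc->sgt[0]. */
struct edesc_sketch {
	int src_nents;
	size_t qm_sg_bytes;
	struct sg_entry_sketch sgt[];	/* flexible array member */
};

int main(void)
{
	int qm_sg_nents = 4;
	size_t qm_sg_bytes = qm_sg_nents * sizeof(struct sg_entry_sketch);
	struct edesc_sketch *edesc;
	struct sg_entry_sketch *sg_table;

	/* One zeroed allocation holds both; the driver instead takes a
	 * fixed-size object from qi_cache_zalloc(). */
	edesc = calloc(1, sizeof(*edesc) + qm_sg_bytes);
	if (!edesc)
		return 1;

	sg_table = &edesc->sgt[0];
	edesc->qm_sg_bytes = qm_sg_bytes;

	/* Only the table itself would be DMA-mapped, i.e.
	 * dma_map_single(dev, sg_table, qm_sg_bytes, ...). */
	printf("edesc at %p, sg table at %p, %zu bytes\n",
	       (void *)edesc, (void *)sg_table, edesc->qm_sg_bytes);

	free(edesc);
	return 0;
}
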
3598 struct ahash_edesc *edesc; in ahash_final_ctx() local
3603 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_final_ctx()
3604 if (!edesc) in ahash_final_ctx()
3608 sg_table = &edesc->sgt[0]; in ahash_final_ctx()
3621 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_final_ctx()
3623 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_final_ctx()
3628 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_final_ctx()
3633 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_final_ctx()
3643 req_ctx->edesc = edesc; in ahash_final_ctx()
3651 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_final_ctx()
3652 qi_cache_free(edesc); in ahash_final_ctx()
3670 struct ahash_edesc *edesc; in ahash_finup_ctx() local
3692 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_finup_ctx()
3693 if (!edesc) { in ahash_finup_ctx()
3698 edesc->src_nents = src_nents; in ahash_finup_ctx()
3702 sg_table = &edesc->sgt[0]; in ahash_finup_ctx()
3715 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_finup_ctx()
3717 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_finup_ctx()
3722 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_finup_ctx()
3727 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_finup_ctx()
3737 req_ctx->edesc = edesc; in ahash_finup_ctx()
3745 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL); in ahash_finup_ctx()
3746 qi_cache_free(edesc); in ahash_finup_ctx()
3762 struct ahash_edesc *edesc; in ahash_digest() local
3785 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_digest()
3786 if (!edesc) { in ahash_digest()
3791 edesc->src_nents = src_nents; in ahash_digest()
3796 struct dpaa2_sg_entry *sg_table = &edesc->sgt[0]; in ahash_digest()
3800 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_digest()
3802 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_digest()
3806 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_digest()
3808 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_digest()
3833 req_ctx->edesc = edesc; in ahash_digest()
3840 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_digest()
3841 qi_cache_free(edesc); in ahash_digest()
3858 struct ahash_edesc *edesc; in ahash_final_no_ctx() local
3862 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_final_no_ctx()
3863 if (!edesc) in ahash_final_no_ctx()
3905 req_ctx->edesc = edesc; in ahash_final_no_ctx()
3913 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_final_no_ctx()
3914 qi_cache_free(edesc); in ahash_final_no_ctx()
3934 struct ahash_edesc *edesc; in ahash_update_no_ctx() local
3962 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_update_no_ctx()
3963 if (!edesc) { in ahash_update_no_ctx()
3969 edesc->src_nents = src_nents; in ahash_update_no_ctx()
3972 sg_table = &edesc->sgt[0]; in ahash_update_no_ctx()
3985 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_no_ctx()
3987 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_no_ctx()
3992 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_no_ctx()
4007 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_no_ctx()
4017 req_ctx->edesc = edesc; in ahash_update_no_ctx()
4043 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE); in ahash_update_no_ctx()
4044 qi_cache_free(edesc); in ahash_update_no_ctx()
4061 struct ahash_edesc *edesc; in ahash_finup_no_ctx() local
4083 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_finup_no_ctx()
4084 if (!edesc) { in ahash_finup_no_ctx()
4089 edesc->src_nents = src_nents; in ahash_finup_no_ctx()
4091 sg_table = &edesc->sgt[0]; in ahash_finup_no_ctx()
4099 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes, in ahash_finup_no_ctx()
4101 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_finup_no_ctx()
4106 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_finup_no_ctx()
4121 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_finup_no_ctx()
4131 req_ctx->edesc = edesc; in ahash_finup_no_ctx()
4139 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE); in ahash_finup_no_ctx()
4140 qi_cache_free(edesc); in ahash_finup_no_ctx()
4158 struct ahash_edesc *edesc; in ahash_update_first() local
4187 edesc = qi_cache_zalloc(GFP_DMA | flags); in ahash_update_first()
4188 if (!edesc) { in ahash_update_first()
4194 edesc->src_nents = src_nents; in ahash_update_first()
4195 sg_table = &edesc->sgt[0]; in ahash_update_first()
4207 edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, in ahash_update_first()
4210 if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) { in ahash_update_first()
4215 edesc->qm_sg_bytes = qm_sg_bytes; in ahash_update_first()
4217 dpaa2_fl_set_addr(in_fle, edesc->qm_sg_dma); in ahash_update_first()
4245 req_ctx->edesc = edesc; in ahash_update_first()
4271 ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE); in ahash_update_first()
4272 qi_cache_free(edesc); in ahash_update_first()