Lines Matching refs:edesc

132 struct skcipher_edesc *edesc; member
137 struct aead_edesc *edesc; member
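Lines 132 and 137 are the edesc back-pointers kept in the driver's per-request contexts. For orientation, a minimal sketch of those contexts, assuming the caam_*_req_ctx names; the listing only shows the member declarations, and the real structs may carry additional state:

struct caam_skcipher_req_ctx {
        struct skcipher_edesc *edesc;   /* line 132: extended descriptor */
};

struct caam_aead_req_ctx {
        struct aead_edesc *edesc;       /* line 137: extended descriptor */
};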
958 struct aead_edesc *edesc, in aead_unmap() argument
962 edesc->src_nents, edesc->dst_nents, 0, 0, in aead_unmap()
963 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); in aead_unmap()
966 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, in skcipher_unmap() argument
973 edesc->src_nents, edesc->dst_nents, in skcipher_unmap()
974 edesc->iv_dma, ivsize, in skcipher_unmap()
975 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); in skcipher_unmap()
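The two unmap helpers at lines 958-975 differ only in the IV arguments they pass down. Reconstructed from the matched fragments, assuming both delegate to a common caam_unmap() that undoes the scatterlist and S/G-table DMA mappings:

static void aead_unmap(struct device *dev, struct aead_edesc *edesc,
                       struct aead_request *req)
{
        /* AEAD keeps no separately mapped IV, hence the 0, 0 pair */
        caam_unmap(dev, req->src, req->dst,
                   edesc->src_nents, edesc->dst_nents, 0, 0,
                   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
                           struct skcipher_request *req)
{
        int ivsize = crypto_skcipher_ivsize(crypto_skcipher_reqtfm(req));

        /* Same teardown, plus the DMA-mapped IV carried in the edesc */
        caam_unmap(dev, req->src, req->dst,
                   edesc->src_nents, edesc->dst_nents,
                   edesc->iv_dma, ivsize,
                   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}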
984 struct aead_edesc *edesc; in aead_crypt_done() local
990 edesc = rctx->edesc; in aead_crypt_done()
991 has_bklog = edesc->bklog; in aead_crypt_done()
996 aead_unmap(jrdev, edesc, req); in aead_crypt_done()
998 kfree(edesc); in aead_crypt_done()
1010 static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc) in skcipher_edesc_iv() argument
1013 return PTR_ALIGN((u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes, in skcipher_edesc_iv()
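skcipher_edesc_iv() at lines 1010-1013 computes where the IV lives inside the edesc allocation: directly after the sec4 S/G table, rounded up to an alignment safe for DMA mapping. The second PTR_ALIGN() argument is truncated in the match; dma_get_cache_alignment() is an assumption consistent with the IV being mapped to edesc->iv_dma later on:

static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc)
{
        /* IV sits right past the S/G table, aligned for safe DMA */
        return PTR_ALIGN((u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
                         dma_get_cache_alignment());
}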
1021 struct skcipher_edesc *edesc; in skcipher_crypt_done() local
1031 edesc = rctx->edesc; in skcipher_crypt_done()
1032 has_bklog = edesc->bklog; in skcipher_crypt_done()
1036 skcipher_unmap(jrdev, edesc, req); in skcipher_crypt_done()
1044 memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize); in skcipher_crypt_done()
1053 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); in skcipher_crypt_done()
1055 kfree(edesc); in skcipher_crypt_done()
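Both completion callbacks (lines 984-998 and 1021-1055) follow one shape: latch the bklog flag before the edesc is freed, unmap, copy the output IV back for chaining modes (skcipher only), free the edesc, then complete the request directly or through the crypto engine. A condensed sketch of the skcipher side with status decoding and debug dumps elided; the jrp->engine finalization route is an assumption based on the has_bklog split visible at lines 1031-1032:

static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
                                void *context)
{
        struct skcipher_request *req = context;
        struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
        struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
        struct skcipher_edesc *edesc = rctx->edesc;
        int ivsize = crypto_skcipher_ivsize(crypto_skcipher_reqtfm(req));
        bool has_bklog = edesc->bklog;  /* latch before kfree() below */

        skcipher_unmap(jrdev, edesc, req);

        /* Hand the output IV back so chaining modes (e.g. CBC) work */
        if (ivsize)
                memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize);

        kfree(edesc);

        if (has_bklog)
                crypto_finalize_skcipher_request(jrp->engine, req, err);
        else
                skcipher_request_complete(req, err);
}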
1071 struct aead_edesc *edesc, in init_aead_job() argument
1077 u32 *desc = edesc->hw_desc; in init_aead_job()
1091 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) : in init_aead_job()
1095 src_dma = edesc->sec4_sg_dma; in init_aead_job()
1096 sec4_sg_index += edesc->mapped_src_nents; in init_aead_job()
1107 if (!edesc->mapped_dst_nents) { in init_aead_job()
1110 } else if (edesc->mapped_dst_nents == 1) { in init_aead_job()
1114 dst_dma = edesc->sec4_sg_dma + in init_aead_job()
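init_aead_job() at lines 1071-1114 chooses between a flat DMA address and the shared sec4 S/G table for each direction; LDST_SGF in the option bits tells the CAAM to treat the pointer as a table. A condensed sketch of just the selection logic, descriptor construction elided:

        if (all_contig) {
                /* zero-length or single mapped segment: point at src directly */
                src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) : 0;
                in_options = 0;
        } else {
                src_dma = edesc->sec4_sg_dma;
                sec4_sg_index += edesc->mapped_src_nents;
                in_options = LDST_SGF;
        }

        if (req->src != req->dst) {
                if (!edesc->mapped_dst_nents) {
                        dst_dma = 0;                        /* nothing to write */
                        out_options = 0;
                } else if (edesc->mapped_dst_nents == 1) {
                        dst_dma = sg_dma_address(req->dst); /* flat output */
                        out_options = 0;
                } else {
                        /* dst entries follow the src entries in the same table */
                        dst_dma = edesc->sec4_sg_dma +
                                  sec4_sg_index * sizeof(struct sec4_sg_entry);
                        out_options = LDST_SGF;
                }
        }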
1132 struct aead_edesc *edesc, in init_gcm_job() argument
1138 u32 *desc = edesc->hw_desc; in init_gcm_job()
1142 init_aead_job(req, edesc, all_contig, encrypt); in init_gcm_job()
1162 struct aead_edesc *edesc, bool all_contig, in init_chachapoly_job() argument
1168 u32 *desc = edesc->hw_desc; in init_chachapoly_job()
1171 init_aead_job(req, edesc, all_contig, encrypt); in init_chachapoly_job()
1196 struct aead_edesc *edesc, in init_authenc_job() argument
1209 u32 *desc = edesc->hw_desc; in init_authenc_job()
1227 init_aead_job(req, edesc, all_contig, encrypt); in init_authenc_job()
1249 struct skcipher_edesc *edesc, in init_skcipher_job() argument
1256 u32 *desc = edesc->hw_desc; in init_skcipher_job()
1265 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen); in init_skcipher_job()
1269 edesc->src_nents > 1 ? 100 : req->cryptlen, 1); in init_skcipher_job()
1277 if (ivsize || edesc->mapped_src_nents > 1) { in init_skcipher_job()
1278 src_dma = edesc->sec4_sg_dma; in init_skcipher_job()
1279 sec4_sg_index = edesc->mapped_src_nents + !!ivsize; in init_skcipher_job()
1290 } else if (!ivsize && edesc->mapped_dst_nents == 1) { in init_skcipher_job()
1293 dst_dma = edesc->sec4_sg_dma + sec4_sg_index * in init_skcipher_job()
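init_skcipher_job() at lines 1249-1293 applies the same selection, except that a non-empty IV forces the S/G form because the IV occupies the first table entry ahead of the source segments, which is what the + !!ivsize at line 1279 accounts for. A sketch of the source-side branch; the destination side then mirrors the AEAD logic above:

        if (ivsize || edesc->mapped_src_nents > 1) {
                /* IV entry (if any) plus src entries live in the sec4 table */
                src_dma = edesc->sec4_sg_dma;
                sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
                in_options = LDST_SGF;
        } else {
                src_dma = sg_dma_address(req->src);  /* single flat segment */
                in_options = 0;
        }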
1316 struct aead_edesc *edesc; in aead_edesc_alloc() local
1398 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, flags); in aead_edesc_alloc()
1399 if (!edesc) { in aead_edesc_alloc()
1405 edesc->src_nents = src_nents; in aead_edesc_alloc()
1406 edesc->dst_nents = dst_nents; in aead_edesc_alloc()
1407 edesc->mapped_src_nents = mapped_src_nents; in aead_edesc_alloc()
1408 edesc->mapped_dst_nents = mapped_dst_nents; in aead_edesc_alloc()
1409 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) + in aead_edesc_alloc()
1412 rctx->edesc = edesc; in aead_edesc_alloc()
1419 edesc->sec4_sg + sec4_sg_index, 0); in aead_edesc_alloc()
1424 edesc->sec4_sg + sec4_sg_index, 0); in aead_edesc_alloc()
1428 return edesc; in aead_edesc_alloc()
1430 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in aead_edesc_alloc()
1432 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in aead_edesc_alloc()
1434 aead_unmap(jrdev, edesc, req); in aead_edesc_alloc()
1435 kfree(edesc); in aead_edesc_alloc()
1439 edesc->sec4_sg_bytes = sec4_sg_bytes; in aead_edesc_alloc()
1441 return edesc; in aead_edesc_alloc()
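aead_edesc_alloc() at lines 1316-1441 makes a single kzalloc() cover the software header, the hardware job descriptor and the sec4 S/G table, then aims the internal pointers into it (line 1409). A sketch of the struct and the resulting layout; the field set comes from the lines above, the exact field order is an assumption:

struct aead_edesc {
        int src_nents;                 /* scatterlist entries in req->src */
        int dst_nents;                 /* scatterlist entries in req->dst */
        int mapped_src_nents;          /* entries actually DMA-mapped */
        int mapped_dst_nents;
        int sec4_sg_bytes;             /* S/G table size, 0 if unused */
        bool bklog;                    /* went through the engine backlog */
        dma_addr_t sec4_sg_dma;        /* bus address of the S/G table */
        struct sec4_sg_entry *sec4_sg; /* points into this same allocation */
        u32 hw_desc[];                 /* hardware job descriptor */
};

/*
 * kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes), line 1398:
 *
 *   edesc -> +------------------------+
 *            | struct aead_edesc      |
 *            +------------------------+ <- edesc->hw_desc
 *            | HW job descriptor      |    (desc_bytes)
 *            +------------------------+ <- edesc->sec4_sg, line 1409
 *            | sec4 S/G table         |    (sec4_sg_bytes)
 *            +------------------------+
 */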
1448 struct aead_edesc *edesc = rctx->edesc; in aead_enqueue_req() local
1449 u32 *desc = edesc->hw_desc; in aead_enqueue_req()
1464 aead_unmap(jrdev, edesc, req); in aead_enqueue_req()
1465 kfree(rctx->edesc); in aead_enqueue_req()
1473 struct aead_edesc *edesc; in chachapoly_crypt() local
1480 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig, in chachapoly_crypt()
1482 if (IS_ERR(edesc)) in chachapoly_crypt()
1483 return PTR_ERR(edesc); in chachapoly_crypt()
1485 desc = edesc->hw_desc; in chachapoly_crypt()
1487 init_chachapoly_job(req, edesc, all_contig, encrypt); in chachapoly_crypt()
1507 struct aead_edesc *edesc; in aead_crypt() local
1514 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, in aead_crypt()
1516 if (IS_ERR(edesc)) in aead_crypt()
1517 return PTR_ERR(edesc); in aead_crypt()
1520 init_authenc_job(req, edesc, all_contig, encrypt); in aead_crypt()
1523 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, in aead_crypt()
1524 desc_bytes(edesc->hw_desc), 1); in aead_crypt()
1544 u32 *desc = rctx->edesc->hw_desc; in aead_do_one_req()
1547 rctx->edesc->bklog = true; in aead_do_one_req()
1555 aead_unmap(ctx->jrdev, rctx->edesc, req); in aead_do_one_req()
1556 kfree(rctx->edesc); in aead_do_one_req()
1566 struct aead_edesc *edesc; in gcm_crypt() local
1573 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, in gcm_crypt()
1575 if (IS_ERR(edesc)) in gcm_crypt()
1576 return PTR_ERR(edesc); in gcm_crypt()
1579 init_gcm_job(req, edesc, all_contig, encrypt); in gcm_crypt()
1582 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, in gcm_crypt()
1583 desc_bytes(edesc->hw_desc), 1); in gcm_crypt()
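chachapoly_crypt(), aead_crypt() and gcm_crypt() (lines 1473-1583) are the same three steps with a different descriptor length and init_*_job() helper. A condensed sketch of the shared shape, debug hexdump elided; reaching the driver context through crypto_aead_ctx() is an assumption:

static int aead_crypt(struct aead_request *req, bool encrypt)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct caam_ctx *ctx = crypto_aead_ctx(aead);
        struct aead_edesc *edesc;
        bool all_contig;

        /* 1. allocate and DMA-map the extended descriptor */
        edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
                                 &all_contig, encrypt);
        if (IS_ERR(edesc))
                return PTR_ERR(edesc);

        /* 2. build the hardware job descriptor for this mode */
        init_authenc_job(req, edesc, all_contig, encrypt);

        /* 3. enqueue on the job ring; the helper cleans up on failure
         *    (lines 1464-1465) */
        return aead_enqueue_req(ctx->jrdev, req);
}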
1621 struct skcipher_edesc *edesc; in skcipher_edesc_alloc() local
1700 aligned_size = sizeof(*edesc) + desc_bytes + sec4_sg_bytes; in skcipher_edesc_alloc()
1705 edesc = kzalloc(aligned_size, flags); in skcipher_edesc_alloc()
1706 if (!edesc) { in skcipher_edesc_alloc()
1713 edesc->src_nents = src_nents; in skcipher_edesc_alloc()
1714 edesc->dst_nents = dst_nents; in skcipher_edesc_alloc()
1715 edesc->mapped_src_nents = mapped_src_nents; in skcipher_edesc_alloc()
1716 edesc->mapped_dst_nents = mapped_dst_nents; in skcipher_edesc_alloc()
1717 edesc->sec4_sg_bytes = sec4_sg_bytes; in skcipher_edesc_alloc()
1718 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc + in skcipher_edesc_alloc()
1720 rctx->edesc = edesc; in skcipher_edesc_alloc()
1724 iv = skcipher_edesc_iv(edesc); in skcipher_edesc_alloc()
1732 kfree(edesc); in skcipher_edesc_alloc()
1736 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0); in skcipher_edesc_alloc()
1739 sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg + in skcipher_edesc_alloc()
1743 sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg + in skcipher_edesc_alloc()
1747 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx + in skcipher_edesc_alloc()
1751 sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx + in skcipher_edesc_alloc()
1755 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in skcipher_edesc_alloc()
1758 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in skcipher_edesc_alloc()
1762 kfree(edesc); in skcipher_edesc_alloc()
1767 edesc->iv_dma = iv_dma; in skcipher_edesc_alloc()
1770 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, in skcipher_edesc_alloc()
1773 return edesc; in skcipher_edesc_alloc()
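skcipher_edesc_alloc() at lines 1621-1773 extends the single-allocation trick with a DMA-able IV region after the S/G table, which is why aligned_size at line 1700 gets padded beyond the raw sum before kzalloc(). The resulting layout, reconstructed from lines 1700-1767 together with skcipher_edesc_iv() above:

/*
 *   edesc -> +---------------------------+
 *            | struct skcipher_edesc     |
 *            +---------------------------+ <- edesc->hw_desc
 *            | HW job descriptor         |    (desc_bytes)
 *            +---------------------------+ <- edesc->sec4_sg, line 1718
 *            | sec4 S/G table            |    (sec4_sg_bytes)
 *            +---------------------------+ <- skcipher_edesc_iv(edesc),
 *            | IV, mapped to             |    PTR_ALIGN'd past the table
 *            | edesc->iv_dma (line 1767) |    (ivsize)
 *            +---------------------------+
 */

Per the fragments at lines 1736-1751, entry 0 of the table is the input IV, the source segments follow, then the destination segments (when dst differs from src) plus a trailing IV entry for the output IV, and the final entry is flagged via sg_to_sec4_set_last().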
1781 u32 *desc = rctx->edesc->hw_desc; in skcipher_do_one_req()
1784 rctx->edesc->bklog = true; in skcipher_do_one_req()
1792 skcipher_unmap(ctx->jrdev, rctx->edesc, req); in skcipher_do_one_req()
1793 kfree(rctx->edesc); in skcipher_do_one_req()
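aead_do_one_req() and skcipher_do_one_req() (lines 1544-1556 and 1781-1793) are the crypto-engine worker callbacks for previously backlogged requests: they set edesc->bklog so the completion handler finalizes through the engine, then enqueue, cleaning up only if caam_jr_enqueue() itself fails. A sketch of the skcipher side; the -ENOSPC retry branch and the crypto_skcipher_ctx() route to the driver context are assumptions:

static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
{
        struct skcipher_request *req = skcipher_request_cast(areq);
        struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
        struct caam_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
        u32 *desc = rctx->edesc->hw_desc;
        int ret;

        rctx->edesc->bklog = true;      /* finalize via the engine later */

        ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);

        if (ret == -ENOSPC && engine->retry_support)
                return ret;             /* ring full: let the engine retry */

        if (ret != -EINPROGRESS) {
                /* enqueue failed outright: undo mappings, drop the edesc */
                skcipher_unmap(ctx->jrdev, rctx->edesc, req);
                kfree(rctx->edesc);
        } else {
                ret = 0;
        }

        return ret;
}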
1811 struct skcipher_edesc *edesc; in skcipher_crypt() local
1845 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ); in skcipher_crypt()
1846 if (IS_ERR(edesc)) in skcipher_crypt()
1847 return PTR_ERR(edesc); in skcipher_crypt()
1850 init_skcipher_job(req, edesc, encrypt); in skcipher_crypt()
1853 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, in skcipher_crypt()
1854 desc_bytes(edesc->hw_desc), 1); in skcipher_crypt()
1856 desc = edesc->hw_desc; in skcipher_crypt()
1869 skcipher_unmap(jrdev, edesc, req); in skcipher_crypt()
1870 kfree(edesc); in skcipher_crypt()