Lines Matching refs:c_req
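Taken together, the fields referenced in these matches imply the following shape for struct sec_cipher_req. This is a reconstruction from the references below, not the authoritative definition (which lives in the driver's sec.h and may contain further members):

struct sec_cipher_req {
	struct hisi_acc_hw_sgl *c_in;		/* hardware SGL for the source data */
	dma_addr_t c_in_dma;			/* DMA address of c_in */
	struct hisi_acc_hw_sgl *c_out;		/* hardware SGL for the destination */
	dma_addr_t c_out_dma;			/* DMA address of c_out */
	u8 *c_ivin;				/* IV buffer handed to the hardware */
	dma_addr_t c_ivin_dma;			/* DMA address of c_ivin */
	struct skcipher_request *sk_req;	/* originating skcipher request */
	u32 c_len;				/* cipher payload length in bytes */
	bool encrypt;				/* true = encrypt, false = decrypt */
};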

72 if (req->c_req.encrypt) in sec_alloc_queue_id()
82 if (req->c_req.encrypt) in sec_free_queue_id()
185 if (ctx->alg_type == SEC_AEAD && !req->c_req.encrypt) in sec_req_cb()
690 struct sec_cipher_req *c_req = &req->c_req; in sec_cipher_pbuf_map() local
699 copy_size = c_req->c_len; in sec_cipher_pbuf_map()
710 c_req->c_in_dma = qp_ctx->res[req_id].pbuf_dma; in sec_cipher_pbuf_map()
712 if (!c_req->c_in_dma) { in sec_cipher_pbuf_map()
717 c_req->c_out_dma = c_req->c_in_dma; in sec_cipher_pbuf_map()
726 struct sec_cipher_req *c_req = &req->c_req; in sec_cipher_pbuf_unmap() local
733 copy_size = c_req->c_len + aead_req->assoclen; in sec_cipher_pbuf_unmap()
735 copy_size = c_req->c_len; in sec_cipher_pbuf_unmap()
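The sec_cipher_pbuf_map()/sec_cipher_pbuf_unmap() matches above describe a fast path for small requests: the payload (plus the associated data for AEAD) is copied through a pre-mapped per-request buffer, so input and output share a single DMA address and no scatter-gather mapping is needed. A condensed sketch; names outside the matches (aead_req, qp_ctx, res, req_id) are assumptions from the surrounding driver context:

/* pbuf fast path, condensed; not the driver's exact code */
if (ctx->alg_type == SEC_AEAD)
	copy_size = c_req->c_len + aead_req->assoclen;
else
	copy_size = c_req->c_len;

c_req->c_in_dma = qp_ctx->res[req_id].pbuf_dma;	/* pre-mapped buffer */
if (!c_req->c_in_dma)
	return -ENOMEM;				/* assumed error code */
c_req->c_out_dma = c_req->c_in_dma;		/* in-place: out == in */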
749 struct sec_cipher_req *c_req = &req->c_req; in sec_cipher_map() local
758 c_req->c_ivin = res->pbuf + SEC_PBUF_IV_OFFSET; in sec_cipher_map()
759 c_req->c_ivin_dma = res->pbuf_dma + SEC_PBUF_IV_OFFSET; in sec_cipher_map()
768 c_req->c_ivin = res->c_ivin; in sec_cipher_map()
769 c_req->c_ivin_dma = res->c_ivin_dma; in sec_cipher_map()
775 c_req->c_in = hisi_acc_sg_buf_map_to_hw_sgl(dev, src, in sec_cipher_map()
778 &c_req->c_in_dma); in sec_cipher_map()
780 if (IS_ERR(c_req->c_in)) { in sec_cipher_map()
782 return PTR_ERR(c_req->c_in); in sec_cipher_map()
786 c_req->c_out = c_req->c_in; in sec_cipher_map()
787 c_req->c_out_dma = c_req->c_in_dma; in sec_cipher_map()
789 c_req->c_out = hisi_acc_sg_buf_map_to_hw_sgl(dev, dst, in sec_cipher_map()
792 &c_req->c_out_dma); in sec_cipher_map()
794 if (IS_ERR(c_req->c_out)) { in sec_cipher_map()
796 hisi_acc_sg_buf_unmap(dev, src, c_req->c_in); in sec_cipher_map()
797 return PTR_ERR(c_req->c_out); in sec_cipher_map()
807 struct sec_cipher_req *c_req = &req->c_req; in sec_cipher_unmap() local
814 hisi_acc_sg_buf_unmap(dev, src, c_req->c_in); in sec_cipher_unmap()
816 hisi_acc_sg_buf_unmap(dev, dst, c_req->c_out); in sec_cipher_unmap()
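Condensed from the sec_cipher_map() matches: the source scatterlist is always mapped to a hardware SGL; the destination either aliases it (in-place operation) or receives its own mapping, with the source unmapped again on failure. The pool and req_id arguments are assumptions inferred from the hisi_acc_sg_buf_map_to_hw_sgl() calls:

c_req->c_in = hisi_acc_sg_buf_map_to_hw_sgl(dev, src, qp_ctx->c_in_pool,
					    req_id, &c_req->c_in_dma);
if (IS_ERR(c_req->c_in))
	return PTR_ERR(c_req->c_in);

if (dst == src) {
	/* in-place: reuse the source mapping for the output */
	c_req->c_out = c_req->c_in;
	c_req->c_out_dma = c_req->c_in_dma;
} else {
	c_req->c_out = hisi_acc_sg_buf_map_to_hw_sgl(dev, dst,
						     qp_ctx->c_out_pool, req_id,
						     &c_req->c_out_dma);
	if (IS_ERR(c_req->c_out)) {
		hisi_acc_sg_buf_unmap(dev, src, c_req->c_in);
		return PTR_ERR(c_req->c_out);
	}
}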
822 struct skcipher_request *sq = req->c_req.sk_req; in sec_skcipher_sgl_map()
829 struct skcipher_request *sq = req->c_req.sk_req; in sec_skcipher_sgl_unmap()
980 struct skcipher_request *sk_req = req->c_req.sk_req; in sec_skcipher_copy_iv()
981 struct sec_cipher_req *c_req = &req->c_req; in sec_skcipher_copy_iv() local
983 memcpy(c_req->c_ivin, sk_req->iv, ctx->c_ctx.ivsize); in sec_skcipher_copy_iv()
989 struct sec_cipher_req *c_req = &req->c_req; in sec_skcipher_bd_fill() local
998 sec_sqe->type2.c_ivin_addr = cpu_to_le64(c_req->c_ivin_dma); in sec_skcipher_bd_fill()
999 sec_sqe->type2.data_src_addr = cpu_to_le64(c_req->c_in_dma); in sec_skcipher_bd_fill()
1000 sec_sqe->type2.data_dst_addr = cpu_to_le64(c_req->c_out_dma); in sec_skcipher_bd_fill()
1009 if (c_req->encrypt) in sec_skcipher_bd_fill()
1020 if (c_req->c_in_dma != c_req->c_out_dma) in sec_skcipher_bd_fill()
1032 sec_sqe->type2.clen_ivhlen |= cpu_to_le32(c_req->c_len); in sec_skcipher_bd_fill()
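The sec_skcipher_bd_fill() matches show the mapped addresses and length being written into the hardware descriptor (SQE). The cpu_to_le64()/cpu_to_le32() conversions indicate the descriptor fields are little-endian (__le64/__le32) regardless of host byte order, and line 1020 suggests a flag is set when source and destination buffers differ. Condensed:

sec_sqe->type2.c_ivin_addr   = cpu_to_le64(c_req->c_ivin_dma);
sec_sqe->type2.data_src_addr = cpu_to_le64(c_req->c_in_dma);
sec_sqe->type2.data_dst_addr = cpu_to_le64(c_req->c_out_dma);
/* direction (c_req->encrypt, line 1009) and the in-place-vs-copy
 * distinction (c_in_dma != c_out_dma, line 1020) set further flag
 * bits not visible in these matches */
sec_sqe->type2.clen_ivhlen |= cpu_to_le32(c_req->c_len);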
1041 struct skcipher_request *sk_req = req->c_req.sk_req; in sec_update_iv()
1048 if (req->c_req.encrypt) in sec_update_iv()
1088 struct skcipher_request *sk_req = req->c_req.sk_req; in sec_skcipher_callback()
1096 if (!err && ctx->c_ctx.c_mode == SEC_CMODE_CBC && req->c_req.encrypt) in sec_skcipher_callback()
1104 backlog_sk_req = backlog_req->c_req.sk_req; in sec_skcipher_callback()
1117 struct sec_cipher_req *c_req = &req->c_req; in sec_aead_copy_iv() local
1119 memcpy(c_req->c_ivin, aead_req->iv, ctx->c_ctx.ivsize); in sec_aead_copy_iv()
1126 struct sec_cipher_req *c_req = &req->c_req; in sec_auth_bd_fill_ex() local
1148 sec_sqe->type2.alen_ivllen = cpu_to_le32(c_req->c_len + aq->assoclen); in sec_auth_bd_fill_ex()
1167 sec_auth_bd_fill_ex(auth_ctx, req->c_req.encrypt, req, sec_sqe); in sec_aead_bd_fill()
1177 struct sec_cipher_req *c_req = &req->c_req; in sec_aead_callback() local
1184 if (!err && c->c_ctx.c_mode == SEC_CMODE_CBC && c_req->encrypt) in sec_aead_callback()
1188 if (!err && c_req->encrypt) { in sec_aead_callback()
1244 struct sec_cipher_req *c_req = &req->c_req; in sec_process() local
1256 if (ctx->c_ctx.c_mode == SEC_CMODE_CBC && !req->c_req.encrypt) in sec_process()
1270 if (ctx->c_ctx.c_mode == SEC_CMODE_CBC && !req->c_req.encrypt) { in sec_process()
1272 memcpy(req->c_req.sk_req->iv, c_req->c_ivin, in sec_process()
1275 memcpy(req->aead_req.aead_req->iv, c_req->c_ivin, in sec_process()
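Read with the sec_update_iv() matches (lines 1041-1048), the sec_process() matches suggest CBC decrypt updates the IV before submission (line 1256) and, apparently on a failure path, restores the caller's IV from the driver's c_ivin copy (lines 1270-1275). A sketch of the restore; the SEC_SKCIPHER discriminator and the error-path placement are assumptions not visible in the matches:

if (ctx->c_ctx.c_mode == SEC_CMODE_CBC && !req->c_req.encrypt) {
	if (ctx->alg_type == SEC_SKCIPHER)	/* assumed discriminator */
		memcpy(req->c_req.sk_req->iv, c_req->c_ivin,
		       ctx->c_ctx.ivsize);
	else					/* AEAD */
		memcpy(req->aead_req.aead_req->iv, c_req->c_ivin,
		       ctx->c_ctx.ivsize);
}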
1412 struct skcipher_request *sk_req = sreq->c_req.sk_req; in sec_skcipher_param_check()
1420 sreq->c_req.c_len = sk_req->cryptlen; in sec_skcipher_param_check()
1456 req->c_req.sk_req = sk_req; in sec_skcipher_crypto()
1457 req->c_req.encrypt = encrypt; in sec_skcipher_crypto()
1559 if (sreq->c_req.encrypt) in sec_aead_param_check()
1560 sreq->c_req.c_len = req->cryptlen; in sec_aead_param_check()
1562 sreq->c_req.c_len = req->cryptlen - authsize; in sec_aead_param_check()
1564 if (unlikely(sreq->c_req.c_len & (AES_BLOCK_SIZE - 1))) { in sec_aead_param_check()
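The sec_aead_param_check() matches make the AEAD length bookkeeping explicit: on encrypt the whole cryptlen is cipher payload, while on decrypt the trailing authsize bytes are the authentication tag and are excluded from c_len; the result must be AES-block aligned for the CBC modes. Condensed, with the error code assumed:

if (sreq->c_req.encrypt)
	sreq->c_req.c_len = req->cryptlen;
else
	/* decrypt input carries the auth tag at its tail */
	sreq->c_req.c_len = req->cryptlen - authsize;

if (unlikely(sreq->c_req.c_len & (AES_BLOCK_SIZE - 1)))
	return -EINVAL;	/* assumption: the match shows only the test */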
1581 req->c_req.encrypt = encrypt; in sec_aead_crypto()