
Searched refs: async_req (Results 1 – 25 of 32), sorted by relevance


/Linux-v5.4/drivers/crypto/qce/
core.c
55 static int qce_handle_request(struct crypto_async_request *async_req) in qce_handle_request() argument
59 u32 type = crypto_tfm_alg_type(async_req->tfm); in qce_handle_request()
65 ret = ops->async_req_handle(async_req); in qce_handle_request()
75 struct crypto_async_request *async_req, *backlog; in qce_handle_queue() local
91 async_req = crypto_dequeue_request(&qce->queue); in qce_handle_queue()
92 if (async_req) in qce_handle_queue()
93 qce->req = async_req; in qce_handle_queue()
97 if (!async_req) in qce_handle_queue()
106 err = qce_handle_request(async_req); in qce_handle_queue()
common.c
222 static int qce_setup_regs_ahash(struct crypto_async_request *async_req, in qce_setup_regs_ahash() argument
225 struct ahash_request *req = ahash_request_cast(async_req); in qce_setup_regs_ahash()
226 struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm); in qce_setup_regs_ahash()
228 struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm); in qce_setup_regs_ahash()
231 unsigned int blocksize = crypto_tfm_alg_blocksize(async_req->tfm); in qce_setup_regs_ahash()
307 static int qce_setup_regs_ablkcipher(struct crypto_async_request *async_req, in qce_setup_regs_ablkcipher() argument
310 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in qce_setup_regs_ablkcipher()
312 struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm); in qce_setup_regs_ablkcipher()
313 struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm); in qce_setup_regs_ablkcipher()
388 int qce_start(struct crypto_async_request *async_req, u32 type, u32 totallen, in qce_start() argument
[all …]
ablkcipher.c
19 struct crypto_async_request *async_req = data; in qce_ablkcipher_done() local
20 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in qce_ablkcipher_done()
22 struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm); in qce_ablkcipher_done()
52 qce_ablkcipher_async_req_handle(struct crypto_async_request *async_req) in qce_ablkcipher_async_req_handle() argument
54 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in qce_ablkcipher_async_req_handle()
57 struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm); in qce_ablkcipher_async_req_handle()
128 qce_ablkcipher_done, async_req); in qce_ablkcipher_async_req_handle()
134 ret = qce_start(async_req, tmpl->crypto_alg_type, req->nbytes, 0); in qce_ablkcipher_async_req_handle()
sha.c
31 struct crypto_async_request *async_req = data; in qce_ahash_done() local
32 struct ahash_request *req = ahash_request_cast(async_req); in qce_ahash_done()
35 struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm); in qce_ahash_done()
68 static int qce_ahash_async_req_handle(struct crypto_async_request *async_req) in qce_ahash_async_req_handle() argument
70 struct ahash_request *req = ahash_request_cast(async_req); in qce_ahash_async_req_handle()
72 struct qce_sha_ctx *ctx = crypto_tfm_ctx(async_req->tfm); in qce_ahash_async_req_handle()
73 struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm); in qce_ahash_async_req_handle()
103 &rctx->result_sg, 1, qce_ahash_done, async_req); in qce_ahash_async_req_handle()
109 ret = qce_start(async_req, tmpl->crypto_alg_type, 0, 0); in qce_ahash_async_req_handle()
core.h
57 int (*async_req_handle)(struct crypto_async_request *async_req);
common.h
91 int qce_start(struct crypto_async_request *async_req, u32 type, u32 totallen,
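
Note: the qce hits above are one instance of a pattern that recurs throughout this result list: a driver-private crypto_queue is drained one request at a time, any backlogged request is notified with -EINPROGRESS, and the dequeued request is handed to the algorithm's async_req_handle hook. Below is a minimal sketch of that loop, assuming the v5.4 queue helpers from <crypto/algapi.h>; my_dev and its fields are hypothetical stand-ins for struct qce_device, and the error path is simplified (the real driver defers completion to a tasklet).

#include <crypto/algapi.h>
#include <linux/errno.h>
#include <linux/spinlock.h>

struct my_dev {
	spinlock_t lock;
	struct crypto_queue queue;		/* crypto_init_queue()'d */
	struct crypto_async_request *req;	/* request in flight, if any */
	int (*async_req_handle)(struct crypto_async_request *async_req);
};

/* Enqueue an optional new request, then try to start the next one. */
static int my_handle_queue(struct my_dev *dev,
			   struct crypto_async_request *new_req)
{
	struct crypto_async_request *async_req, *backlog;
	unsigned long flags;
	int ret = 0, err;

	spin_lock_irqsave(&dev->lock, flags);

	if (new_req)
		ret = crypto_enqueue_request(&dev->queue, new_req);

	/* One request at a time: bail out if the hardware is busy. */
	if (dev->req) {
		spin_unlock_irqrestore(&dev->lock, flags);
		return ret;
	}

	backlog = crypto_get_backlog(&dev->queue);
	async_req = crypto_dequeue_request(&dev->queue);
	if (async_req)
		dev->req = async_req;

	spin_unlock_irqrestore(&dev->lock, flags);

	if (!async_req)
		return ret;

	/* Tell a backlogged submitter its request is now being processed. */
	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	err = dev->async_req_handle(async_req);
	if (err) {
		/* Simplification: complete the failed request inline. */
		dev->req = NULL;
		async_req->complete(async_req, err);
	}

	return ret;
}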
/Linux-v5.4/crypto/
rsa-pkcs1pad.c
216 struct crypto_async_request async_req; in pkcs1pad_encrypt_sign_complete_cb() local
221 async_req.data = req->base.data; in pkcs1pad_encrypt_sign_complete_cb()
222 async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req)); in pkcs1pad_encrypt_sign_complete_cb()
223 async_req.flags = child_async_req->flags; in pkcs1pad_encrypt_sign_complete_cb()
224 req->base.complete(&async_req, in pkcs1pad_encrypt_sign_complete_cb()
334 struct crypto_async_request async_req; in pkcs1pad_decrypt_complete_cb() local
339 async_req.data = req->base.data; in pkcs1pad_decrypt_complete_cb()
340 async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req)); in pkcs1pad_decrypt_complete_cb()
341 async_req.flags = child_async_req->flags; in pkcs1pad_decrypt_complete_cb()
342 req->base.complete(&async_req, pkcs1pad_decrypt_complete(req, err)); in pkcs1pad_decrypt_complete_cb()
[all …]
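
Note: rsa-pkcs1pad.c puts a crypto_async_request on the stack for a different reason than the drivers above. Its completion callbacks fire on the child RSA transform, but the caller must see its own akcipher tfm and callback data, so the callback rebuilds a request before completing. A condensed sketch of pkcs1pad_encrypt_sign_complete_cb(), with the padding post-processing elided:

#include <crypto/akcipher.h>
#include <linux/errno.h>

static void my_complete_cb(struct crypto_async_request *child_async_req,
			   int err)
{
	struct akcipher_request *req = child_async_req->data;
	struct crypto_async_request async_req;

	if (err == -EINPROGRESS)
		return;

	/* Point the caller at its own tfm and data, not the child's. */
	async_req.data = req->base.data;
	async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req));
	async_req.flags = child_async_req->flags;
	req->base.complete(&async_req, err);	/* padding fixup elided */
}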
crypto_engine.c
68 struct crypto_async_request *async_req, *backlog; in crypto_pump_requests() local
113 async_req = crypto_dequeue_request(&engine->queue); in crypto_pump_requests()
114 if (!async_req) in crypto_pump_requests()
117 engine->cur_req = async_req; in crypto_pump_requests()
137 enginectx = crypto_tfm_ctx(async_req->tfm); in crypto_pump_requests()
140 ret = enginectx->op.prepare_request(engine, async_req); in crypto_pump_requests()
153 ret = enginectx->op.do_one_request(engine, async_req); in crypto_pump_requests()
161 crypto_finalize_request(engine, async_req, ret); in crypto_pump_requests()
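
Note: crypto_pump_requests() is the generic replacement for the driver-private loops elsewhere in this list. A driver opts in by placing a struct crypto_engine_ctx at the start of its tfm context, which is how the lines above can fetch the ops via crypto_tfm_ctx(async_req->tfm). A sketch of the driver side, assuming the v5.4 <crypto/engine.h> API; all my_* names are hypothetical:

#include <crypto/engine.h>

struct my_tfm_ctx {
	struct crypto_engine_ctx enginectx;	/* must stay first */
	/* ... keys and other per-tfm state ... */
};

static int my_prepare_request(struct crypto_engine *engine, void *areq)
{
	/* Map DMA buffers, load the IV, etc. */
	return 0;
}

static int my_do_one_request(struct crypto_engine *engine, void *areq)
{
	/* Kick the hardware; the IRQ path later calls one of the
	 * crypto_finalize_*_request() helpers for this request. */
	return 0;
}

static int my_init_tfm(struct crypto_tfm *tfm)
{
	struct my_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->enginectx.op.prepare_request = my_prepare_request;
	ctx->enginectx.op.unprepare_request = NULL;
	ctx->enginectx.op.do_one_request = my_do_one_request;
	return 0;
}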
/Linux-v5.4/drivers/crypto/rockchip/
rk3288_crypto.c
185 struct crypto_async_request *async_req) in rk_crypto_enqueue() argument
191 ret = crypto_enqueue_request(&dev->queue, async_req); in rk_crypto_enqueue()
206 struct crypto_async_request *async_req, *backlog; in rk_crypto_queue_task_cb() local
213 async_req = crypto_dequeue_request(&dev->queue); in rk_crypto_queue_task_cb()
215 if (!async_req) { in rk_crypto_queue_task_cb()
227 dev->async_req = async_req; in rk_crypto_queue_task_cb()
230 dev->complete(dev->async_req, err); in rk_crypto_queue_task_cb()
238 dev->complete(dev->async_req, dev->err); in rk_crypto_done_task_cb()
244 dev->complete(dev->async_req, dev->err); in rk_crypto_done_task_cb()
rk3288_crypto_ablkcipher.c
200 ablkcipher_request_cast(dev->async_req); in rk_ablk_hw_init()
248 ablkcipher_request_cast(dev->async_req); in rk_set_data_start()
273 ablkcipher_request_cast(dev->async_req); in rk_ablk_start()
296 ablkcipher_request_cast(dev->async_req); in rk_iv_copyback()
316 ablkcipher_request_cast(dev->async_req); in rk_update_iv()
343 ablkcipher_request_cast(dev->async_req); in rk_ablk_rx()
371 dev->complete(dev->async_req, 0); in rk_ablk_rx()
rk3288_crypto_ahash.c
48 struct ahash_request *req = ahash_request_cast(dev->async_req); in rk_ahash_reg_init()
195 struct ahash_request *req = ahash_request_cast(dev->async_req); in rk_ahash_start()
232 struct ahash_request *req = ahash_request_cast(dev->async_req); in rk_ahash_crypto_rx()
264 dev->complete(dev->async_req, 0); in rk_ahash_crypto_rx()
rk3288_crypto.h
196 struct crypto_async_request *async_req; member
227 struct crypto_async_request *async_req);
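
Note: the Rockchip driver splits the pattern across two tasklets: rk_crypto_queue_task_cb() dequeues into dev->async_req and starts the hardware, and rk_crypto_done_task_cb() finalizes it once the IRQ handler has recorded the result. A hedged sketch of the completion half, again with hypothetical my_* names:

#include <crypto/algapi.h>
#include <linux/interrupt.h>

struct my_dev {
	struct tasklet_struct queue_task;	/* dequeues and starts hw */
	struct tasklet_struct done_task;	/* scheduled by the IRQ */
	struct crypto_async_request *async_req;	/* request in flight */
	int err;				/* filled in by the IRQ handler */
	void (*complete)(struct crypto_async_request *req, int err);
};

static void my_done_task_cb(unsigned long data)
{
	struct my_dev *dev = (struct my_dev *)data;

	dev->complete(dev->async_req, dev->err);
	dev->async_req = NULL;

	/* Re-arm: let the queue tasklet fetch the next pending request. */
	tasklet_schedule(&dev->queue_task);
}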
/Linux-v5.4/drivers/crypto/ccp/
ccp-crypto-aes.c
22 static int ccp_aes_complete(struct crypto_async_request *async_req, int ret) in ccp_aes_complete() argument
24 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in ccp_aes_complete()
139 static int ccp_aes_rfc3686_complete(struct crypto_async_request *async_req, in ccp_aes_rfc3686_complete() argument
142 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in ccp_aes_rfc3686_complete()
148 return ccp_aes_complete(async_req, ret); in ccp_aes_rfc3686_complete()
ccp-crypto-des3.c
21 static int ccp_des3_complete(struct crypto_async_request *async_req, int ret) in ccp_des3_complete() argument
23 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in ccp_des3_complete()
ccp-crypto-aes-xts.c
62 static int ccp_aes_xts_complete(struct crypto_async_request *async_req, int ret) in ccp_aes_xts_complete() argument
64 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in ccp_aes_xts_complete()
ccp-crypto-rsa.c
44 static int ccp_rsa_complete(struct crypto_async_request *async_req, int ret) in ccp_rsa_complete() argument
46 struct akcipher_request *req = akcipher_request_cast(async_req); in ccp_rsa_complete()
ccp-crypto-aes-cmac.c
23 static int ccp_aes_cmac_complete(struct crypto_async_request *async_req, in ccp_aes_cmac_complete() argument
26 struct ahash_request *req = ahash_request_cast(async_req); in ccp_aes_cmac_complete()
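
Note: every ccp hit above is a completion callback with the same shape: cast the generic crypto_async_request back to the concrete request type, then do per-mode fixups. A sketch modeled on ccp_aes_complete(); my_req_ctx is a hypothetical stand-in for the driver's request context, and the check is simplified to "an IV is present" rather than the driver's mode test:

#include <crypto/aes.h>
#include <linux/crypto.h>
#include <linux/string.h>

struct my_req_ctx {
	u8 iv[AES_BLOCK_SIZE];	/* next IV produced by the engine */
};

static int my_aes_complete(struct crypto_async_request *async_req, int ret)
{
	struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
	struct my_req_ctx *rctx = ablkcipher_request_ctx(req);

	if (ret)
		return ret;

	/* For chaining modes, hand back the IV for the next request. */
	if (req->info)
		memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);

	return 0;
}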
/Linux-v5.4/drivers/crypto/
sahara.c
1051 struct crypto_async_request *async_req; in sahara_queue_manage() local
1060 async_req = crypto_dequeue_request(&dev->queue); in sahara_queue_manage()
1066 if (async_req) { in sahara_queue_manage()
1067 if (crypto_tfm_alg_type(async_req->tfm) == in sahara_queue_manage()
1070 ahash_request_cast(async_req); in sahara_queue_manage()
1075 ablkcipher_request_cast(async_req); in sahara_queue_manage()
1080 async_req->complete(async_req, ret); in sahara_queue_manage()
s5p-sss.c
1375 struct crypto_async_request *async_req, *backlog; in s5p_hash_handle_queue() local
1391 async_req = crypto_dequeue_request(&dd->hash_queue); in s5p_hash_handle_queue()
1392 if (async_req) in s5p_hash_handle_queue()
1397 if (!async_req) in s5p_hash_handle_queue()
1403 req = ahash_request_cast(async_req); in s5p_hash_handle_queue()
2006 struct crypto_async_request *async_req, *backlog; in s5p_tasklet_cb() local
2012 async_req = crypto_dequeue_request(&dev->queue); in s5p_tasklet_cb()
2014 if (!async_req) { in s5p_tasklet_cb()
2024 dev->req = ablkcipher_request_cast(async_req); in s5p_tasklet_cb()
img-hash.c
497 struct crypto_async_request *async_req, *backlog; in img_hash_handle_queue() local
513 async_req = crypto_dequeue_request(&hdev->queue); in img_hash_handle_queue()
514 if (async_req) in img_hash_handle_queue()
519 if (!async_req) in img_hash_handle_queue()
525 req = ahash_request_cast(async_req); in img_hash_handle_queue()
atmel-tdes.c
588 struct crypto_async_request *async_req, *backlog; in atmel_tdes_handle_queue() local
602 async_req = crypto_dequeue_request(&dd->queue); in atmel_tdes_handle_queue()
603 if (async_req) in atmel_tdes_handle_queue()
607 if (!async_req) in atmel_tdes_handle_queue()
613 req = ablkcipher_request_cast(async_req); in atmel_tdes_handle_queue()
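
Note: the s5p-sss, img-hash, and atmel-tdes hits follow the qce-style loop sketched earlier; sahara.c adds a twist because it serves hashes and block ciphers from a single queue, branching on crypto_tfm_alg_type() before casting. A sketch of that dispatch; my_do_ahash() and my_do_cipher() are hypothetical hardware entry points stubbed out here:

#include <crypto/algapi.h>
#include <crypto/hash.h>
#include <linux/crypto.h>

static int my_do_ahash(struct ahash_request *req)
{
	return 0;	/* start the hash hardware */
}

static int my_do_cipher(struct ablkcipher_request *req)
{
	return 0;	/* start the cipher hardware */
}

static void my_dispatch(struct crypto_async_request *async_req)
{
	int ret;

	if (crypto_tfm_alg_type(async_req->tfm) == CRYPTO_ALG_TYPE_AHASH)
		ret = my_do_ahash(ahash_request_cast(async_req));
	else
		ret = my_do_cipher(ablkcipher_request_cast(async_req));

	/* Hand the verdict back to whoever queued the request. */
	async_req->complete(async_req, ret);
}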
/Linux-v5.4/drivers/net/usb/
rtl8150.c
141 struct async_req { struct
189 struct async_req *req = (struct async_req *)urb->context; in async_set_reg_cb()
202 struct async_req *req; in async_set_registers()
204 req = kmalloc(sizeof(struct async_req), GFP_ATOMIC); in async_set_registers()
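
Note: rtl8150.c's async_req is unrelated to the crypto API: it is a small carrier for a fire-and-forget register write over a USB control URB, allocated with GFP_ATOMIC because the caller may not sleep, and freed together with the URB in the completion handler. A hedged sketch of that pattern; the vendor request value and my_* names are illustrative, not the driver's exact constants:

#include <linux/slab.h>
#include <linux/usb.h>

struct my_async_req {
	struct urb *urb;
	struct usb_ctrlrequest dr;	/* setup packet must outlive submit */
	__le16 value;			/* register payload */
};

static void my_async_cb(struct urb *urb)
{
	struct my_async_req *req = urb->context;

	if (urb->status)
		dev_dbg(&urb->dev->dev, "async write failed: %d\n",
			urb->status);
	kfree(req);
	usb_free_urb(urb);
}

static int my_async_write_reg(struct usb_device *udev, u16 reg, u16 value)
{
	struct my_async_req *req;
	int ret;

	req = kmalloc(sizeof(*req), GFP_ATOMIC);	/* may run in irq context */
	if (!req)
		return -ENOMEM;

	req->value = cpu_to_le16(value);
	req->dr.bRequestType = USB_DIR_OUT | USB_TYPE_VENDOR | USB_RECIP_DEVICE;
	req->dr.bRequest = 0x05;	/* illustrative vendor request */
	req->dr.wValue = cpu_to_le16(reg);
	req->dr.wIndex = 0;
	req->dr.wLength = cpu_to_le16(sizeof(req->value));

	req->urb = usb_alloc_urb(0, GFP_ATOMIC);
	if (!req->urb) {
		kfree(req);
		return -ENOMEM;
	}

	usb_fill_control_urb(req->urb, udev, usb_sndctrlpipe(udev, 0),
			     (unsigned char *)&req->dr, &req->value,
			     sizeof(req->value), my_async_cb, req);

	ret = usb_submit_urb(req->urb, GFP_ATOMIC);
	if (ret) {
		usb_free_urb(req->urb);
		kfree(req);
	}
	return ret;
}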
/Linux-v5.4/drivers/nvme/host/
tcp.c
115 struct nvme_tcp_request async_req; member
161 return req == &req->queue->ctrl->async_req; in nvme_tcp_async_req()
1112 struct nvme_tcp_request *async = &ctrl->async_req; in nvme_tcp_free_async_req()
1120 struct nvme_tcp_request *async = &ctrl->async_req; in nvme_tcp_alloc_async_req()
1502 if (to_tcp_ctrl(ctrl)->async_req.pdu) { in nvme_tcp_free_admin_queue()
1504 to_tcp_ctrl(ctrl)->async_req.pdu = NULL; in nvme_tcp_free_admin_queue()
2018 struct nvme_tcp_cmd_pdu *pdu = ctrl->async_req.pdu; in nvme_tcp_submit_async_event()
2034 ctrl->async_req.state = NVME_TCP_SEND_CMD_PDU; in nvme_tcp_submit_async_event()
2035 ctrl->async_req.offset = 0; in nvme_tcp_submit_async_event()
2036 ctrl->async_req.curr_bio = NULL; in nvme_tcp_submit_async_event()
[all …]
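
Note: in the NVMe-over-TCP hits, async_req is not a queue at all but a single preallocated command slot reserved for the Admin Async Event Request, which lives outside the blk-mq tagset; nvme_tcp_async_req() recognizes it purely by address comparison. A sketch of that idiom with hypothetical types (the real check goes through req->queue->ctrl):

#include <linux/types.h>

struct my_ctrl;

struct my_request {
	struct my_ctrl *ctrl;
	/* ... PDU pointer, send offset, state ... */
};

struct my_ctrl {
	struct my_request async_req;	/* the one reserved AER slot */
	/* ... queues, tagset, ... */
};

static inline bool my_is_async(struct my_request *req)
{
	/* Same trick as nvme_tcp_async_req(): identify the AER by address. */
	return req == &req->ctrl->async_req;
}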
/Linux-v5.4/drivers/crypto/mediatek/
mtk-sha.c
654 struct crypto_async_request *async_req, *backlog; in mtk_sha_handle_queue() local
669 async_req = crypto_dequeue_request(&sha->queue); in mtk_sha_handle_queue()
670 if (async_req) in mtk_sha_handle_queue()
674 if (!async_req) in mtk_sha_handle_queue()
680 req = ahash_request_cast(async_req); in mtk_sha_handle_queue()
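
Note: mtk_sha_handle_queue() is another instance of the backlog-notify-then-dequeue loop sketched after the qce results above; only the hardware start differs.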
/Linux-v5.4/drivers/crypto/amcc/
crypto4xx_core.h
70 struct crypto_async_request *async_req; /* base crypto request member
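
Note: crypto4xx takes the same ownership approach as the Rockchip driver: it stashes the base crypto_async_request pointer alongside its hardware descriptor state so the completion path can finish the originating request.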
