Searched refs: async_req (Results 1 – 25 of 31) sorted by relevance

/Linux-v4.19/drivers/crypto/qce/
core.c
63 static int qce_handle_request(struct crypto_async_request *async_req) in qce_handle_request() argument
67 u32 type = crypto_tfm_alg_type(async_req->tfm); in qce_handle_request()
73 ret = ops->async_req_handle(async_req); in qce_handle_request()
83 struct crypto_async_request *async_req, *backlog; in qce_handle_queue() local
99 async_req = crypto_dequeue_request(&qce->queue); in qce_handle_queue()
100 if (async_req) in qce_handle_queue()
101 qce->req = async_req; in qce_handle_queue()
105 if (!async_req) in qce_handle_queue()
114 err = qce_handle_request(async_req); in qce_handle_queue()
common.c
230 static int qce_setup_regs_ahash(struct crypto_async_request *async_req, in qce_setup_regs_ahash() argument
233 struct ahash_request *req = ahash_request_cast(async_req); in qce_setup_regs_ahash()
234 struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm); in qce_setup_regs_ahash()
236 struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm); in qce_setup_regs_ahash()
239 unsigned int blocksize = crypto_tfm_alg_blocksize(async_req->tfm); in qce_setup_regs_ahash()
315 static int qce_setup_regs_ablkcipher(struct crypto_async_request *async_req, in qce_setup_regs_ablkcipher() argument
318 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in qce_setup_regs_ablkcipher()
320 struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm); in qce_setup_regs_ablkcipher()
321 struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm); in qce_setup_regs_ablkcipher()
396 int qce_start(struct crypto_async_request *async_req, u32 type, u32 totallen, in qce_start() argument
[all …]
ablkcipher.c
27 struct crypto_async_request *async_req = data; in qce_ablkcipher_done() local
28 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in qce_ablkcipher_done()
30 struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm); in qce_ablkcipher_done()
60 qce_ablkcipher_async_req_handle(struct crypto_async_request *async_req) in qce_ablkcipher_async_req_handle() argument
62 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in qce_ablkcipher_async_req_handle()
65 struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm); in qce_ablkcipher_async_req_handle()
136 qce_ablkcipher_done, async_req); in qce_ablkcipher_async_req_handle()
142 ret = qce_start(async_req, tmpl->crypto_alg_type, req->nbytes, 0); in qce_ablkcipher_async_req_handle()
sha.c
39 struct crypto_async_request *async_req = data; in qce_ahash_done() local
40 struct ahash_request *req = ahash_request_cast(async_req); in qce_ahash_done()
43 struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm); in qce_ahash_done()
76 static int qce_ahash_async_req_handle(struct crypto_async_request *async_req) in qce_ahash_async_req_handle() argument
78 struct ahash_request *req = ahash_request_cast(async_req); in qce_ahash_async_req_handle()
80 struct qce_sha_ctx *ctx = crypto_tfm_ctx(async_req->tfm); in qce_ahash_async_req_handle()
81 struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm); in qce_ahash_async_req_handle()
111 &rctx->result_sg, 1, qce_ahash_done, async_req); in qce_ahash_async_req_handle()
117 ret = qce_start(async_req, tmpl->crypto_alg_type, 0, 0); in qce_ahash_async_req_handle()
core.h
65 int (*async_req_handle)(struct crypto_async_request *async_req);
common.h
99 int qce_start(struct crypto_async_request *async_req, u32 type, u32 totallen,
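
Taken together, the qce hits show the stock driver-side software queue: qce_handle_queue() pulls one crypto_async_request off a crypto_queue, signals any backlogged request with -EINPROGRESS, and hands the winner to the type-specific async_req_handle op declared at core.h line 65. A minimal sketch of that loop, using the queue helpers from <crypto/algapi.h>; the toy_qce_device layout and function names are hypothetical stand-ins:

    #include <linux/spinlock.h>
    #include <crypto/algapi.h>

    /* Hypothetical, pared-down device state; the real struct qce_device
     * carries far more. */
    struct toy_qce_device {
            spinlock_t lock;
            struct crypto_queue queue;
            struct crypto_async_request *req;   /* request that owns the hardware */
    };

    static int toy_handle_queue(struct toy_qce_device *qce,
                                struct crypto_async_request *req)
    {
            struct crypto_async_request *async_req, *backlog;
            unsigned long flags;
            int ret = 0;

            spin_lock_irqsave(&qce->lock, flags);
            if (req)
                    ret = crypto_enqueue_request(&qce->queue, req);
            if (qce->req) {                 /* hardware busy; request stays queued */
                    spin_unlock_irqrestore(&qce->lock, flags);
                    return ret;
            }
            backlog = crypto_get_backlog(&qce->queue);
            async_req = crypto_dequeue_request(&qce->queue);
            if (async_req)
                    qce->req = async_req;
            spin_unlock_irqrestore(&qce->lock, flags);

            if (!async_req)
                    return ret;
            if (backlog)
                    backlog->complete(backlog, -EINPROGRESS);

            /* qce_handle_request() would now switch on
             * crypto_tfm_alg_type(async_req->tfm) and call
             * ops->async_req_handle(async_req), per the core.c hits. */
            return ret;
    }

The same enqueue/backlog/dequeue shape recurs in the sahara, s5p-sss, img-hash, atmel-tdes and mtk-sha hits further down.
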
/Linux-v4.19/crypto/
rsa-pkcs1pad.c
219 struct crypto_async_request async_req; in pkcs1pad_encrypt_sign_complete_cb() local
224 async_req.data = req->base.data; in pkcs1pad_encrypt_sign_complete_cb()
225 async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req)); in pkcs1pad_encrypt_sign_complete_cb()
226 async_req.flags = child_async_req->flags; in pkcs1pad_encrypt_sign_complete_cb()
227 req->base.complete(&async_req, in pkcs1pad_encrypt_sign_complete_cb()
346 struct crypto_async_request async_req; in pkcs1pad_decrypt_complete_cb() local
351 async_req.data = req->base.data; in pkcs1pad_decrypt_complete_cb()
352 async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req)); in pkcs1pad_decrypt_complete_cb()
353 async_req.flags = child_async_req->flags; in pkcs1pad_decrypt_complete_cb()
354 req->base.complete(&async_req, pkcs1pad_decrypt_complete(req, err)); in pkcs1pad_decrypt_complete_cb()
[all …]
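
The rsa-pkcs1pad hits invert the usual cast: when the child RSA request completes, the callback cannot hand the child's base request back to the caller, so it fabricates a temporary crypto_async_request on the stack carrying the parent's data and tfm (snippet lines 224-227 and 351-354). A hedged reconstruction with the v4.19 completion signature; the function name and the elided unpadding step are illustrative:

    #include <crypto/akcipher.h>

    static void toy_child_done(struct crypto_async_request *child_async_req,
                               int err)
    {
            struct akcipher_request *req = child_async_req->data; /* parent */
            struct crypto_async_request async_req;

            if (err == -EINPROGRESS)
                    return;

            async_req.data = req->base.data;
            async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req));
            async_req.flags = child_async_req->flags;
            /* ... post-process the child's output (strip padding, etc.) ... */
            req->base.complete(&async_req, err);  /* parent sees its own context */
    }
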
crypto_engine.c
73 struct crypto_async_request *async_req, *backlog; in crypto_pump_requests() local
118 async_req = crypto_dequeue_request(&engine->queue); in crypto_pump_requests()
119 if (!async_req) in crypto_pump_requests()
122 engine->cur_req = async_req; in crypto_pump_requests()
142 enginectx = crypto_tfm_ctx(async_req->tfm); in crypto_pump_requests()
145 ret = enginectx->op.prepare_request(engine, async_req); in crypto_pump_requests()
158 ret = enginectx->op.do_one_request(engine, async_req); in crypto_pump_requests()
166 crypto_finalize_request(engine, async_req, ret); in crypto_pump_requests()
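
crypto_engine.c is the generic, shareable version of the same pump: crypto_pump_requests() dequeues, records the request in engine->cur_req, then dispatches through the ops stashed in the tfm context. A condensed sketch of just the dispatch step (locking and kworker plumbing omitted; crypto_finalize_request() is internal to crypto_engine.c, so this only illustrates the flow):

    #include <crypto/engine.h>

    /* Assumes async_req was just dequeued and engine->cur_req set, as in
     * snippet lines 118-122. */
    static void toy_engine_dispatch(struct crypto_engine *engine,
                                    struct crypto_async_request *async_req)
    {
            struct crypto_engine_ctx *enginectx;
            int ret;

            enginectx = crypto_tfm_ctx(async_req->tfm);

            if (enginectx->op.prepare_request) {
                    ret = enginectx->op.prepare_request(engine, async_req);
                    if (ret) {
                            crypto_finalize_request(engine, async_req, ret);
                            return;
                    }
            }
            ret = enginectx->op.do_one_request(engine, async_req);
            if (ret)
                    crypto_finalize_request(engine, async_req, ret);
    }
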
/Linux-v4.19/drivers/crypto/rockchip/
rk3288_crypto.c
188 struct crypto_async_request *async_req) in rk_crypto_enqueue() argument
194 ret = crypto_enqueue_request(&dev->queue, async_req); in rk_crypto_enqueue()
209 struct crypto_async_request *async_req, *backlog; in rk_crypto_queue_task_cb() local
216 async_req = crypto_dequeue_request(&dev->queue); in rk_crypto_queue_task_cb()
218 if (!async_req) { in rk_crypto_queue_task_cb()
230 dev->async_req = async_req; in rk_crypto_queue_task_cb()
233 dev->complete(dev->async_req, err); in rk_crypto_queue_task_cb()
241 dev->complete(dev->async_req, dev->err); in rk_crypto_done_task_cb()
247 dev->complete(dev->async_req, dev->err); in rk_crypto_done_task_cb()
rk3288_crypto_ablkcipher.c
198 ablkcipher_request_cast(dev->async_req); in rk_ablk_hw_init()
255 ablkcipher_request_cast(dev->async_req); in rk_ablk_start()
277 ablkcipher_request_cast(dev->async_req); in rk_iv_copyback()
296 ablkcipher_request_cast(dev->async_req); in rk_ablk_rx()
323 dev->complete(dev->async_req, 0); in rk_ablk_rx()
rk3288_crypto_ahash.c
51 struct ahash_request *req = ahash_request_cast(dev->async_req); in rk_ahash_reg_init()
198 struct ahash_request *req = ahash_request_cast(dev->async_req); in rk_ahash_start()
235 struct ahash_request *req = ahash_request_cast(dev->async_req); in rk_ahash_crypto_rx()
267 dev->complete(dev->async_req, 0); in rk_ahash_crypto_rx()
rk3288_crypto.h
196 struct crypto_async_request *async_req; member
226 struct crypto_async_request *async_req);
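
The rk3288 hits add a wrinkle: the dequeued request is parked in the device struct (the async_req member at rk3288_crypto.h line 196) so the interrupt-driven done tasklet can finish it later. Roughly, with a hypothetical pared-down device struct:

    #include <linux/spinlock.h>
    #include <crypto/algapi.h>

    /* Field names follow the snippets; everything else is illustrative. */
    struct toy_rk_dev {
            spinlock_t lock;
            struct crypto_queue queue;
            struct crypto_async_request *async_req;
            int err;
            void (*complete)(struct crypto_async_request *base, int err);
    };

    static void toy_rk_queue_task_cb(struct toy_rk_dev *dev)
    {
            struct crypto_async_request *async_req, *backlog;

            spin_lock(&dev->lock);
            backlog = crypto_get_backlog(&dev->queue);
            async_req = crypto_dequeue_request(&dev->queue);
            spin_unlock(&dev->lock);

            if (!async_req)
                    return;
            if (backlog)
                    backlog->complete(backlog, -EINPROGRESS);

            dev->async_req = async_req;     /* parked for the done tasklet */
            /* ... program the engine; on setup failure, complete immediately
             * via dev->complete(dev->async_req, err), per snippet line 233 ... */
    }

    static void toy_rk_done_task_cb(struct toy_rk_dev *dev)
    {
            /* hardware raised its IRQ (or the ISR recorded dev->err) */
            dev->complete(dev->async_req, dev->err);
    }
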
/Linux-v4.19/drivers/crypto/ccp/
ccp-crypto-aes.c
25 static int ccp_aes_complete(struct crypto_async_request *async_req, int ret) in ccp_aes_complete() argument
27 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in ccp_aes_complete()
143 static int ccp_aes_rfc3686_complete(struct crypto_async_request *async_req, in ccp_aes_rfc3686_complete() argument
146 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in ccp_aes_rfc3686_complete()
152 return ccp_aes_complete(async_req, ret); in ccp_aes_rfc3686_complete()
ccp-crypto-des3.c
24 static int ccp_des3_complete(struct crypto_async_request *async_req, int ret) in ccp_des3_complete() argument
26 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in ccp_des3_complete()
ccp-crypto-aes-xts.c
65 static int ccp_aes_xts_complete(struct crypto_async_request *async_req, int ret) in ccp_aes_xts_complete() argument
67 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in ccp_aes_xts_complete()
ccp-crypto-rsa.c
48 static int ccp_rsa_complete(struct crypto_async_request *async_req, int ret) in ccp_rsa_complete() argument
50 struct akcipher_request *req = akcipher_request_cast(async_req); in ccp_rsa_complete()
ccp-crypto-aes-cmac.c
26 static int ccp_aes_cmac_complete(struct crypto_async_request *async_req, in ccp_aes_cmac_complete() argument
29 struct ahash_request *req = ahash_request_cast(async_req); in ccp_aes_cmac_complete()
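
All five ccp callbacks open with the same idiom: the hardware layer only tracks the generic crypto_async_request, so each completion first recovers its concrete request type through a container_of-based cast helper. For example (toy function names; bodies illustrative):

    #include <linux/kernel.h>
    #include <linux/crypto.h>
    #include <crypto/hash.h>

    static int toy_cipher_complete(struct crypto_async_request *async_req,
                                   int ret)
    {
            struct ablkcipher_request *req = ablkcipher_request_cast(async_req);

            if (ret)
                    return ret;
            /* e.g. ccp_aes_complete() copies the next IV into req->info here */
            pr_debug("cipher done, %u bytes\n", req->nbytes);
            return 0;
    }

    static int toy_hash_complete(struct crypto_async_request *async_req,
                                 int ret)
    {
            struct ahash_request *req = ahash_request_cast(async_req);

            if (ret)
                    return ret;
            /* e.g. ccp_aes_cmac_complete() moves the MAC into req->result here */
            pr_debug("hash done, %u bytes\n", req->nbytes);
            return 0;
    }
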
/Linux-v4.19/drivers/crypto/
sahara.c
1055 struct crypto_async_request *async_req; in sahara_queue_manage() local
1064 async_req = crypto_dequeue_request(&dev->queue); in sahara_queue_manage()
1070 if (async_req) { in sahara_queue_manage()
1071 if (crypto_tfm_alg_type(async_req->tfm) == in sahara_queue_manage()
1074 ahash_request_cast(async_req); in sahara_queue_manage()
1079 ablkcipher_request_cast(async_req); in sahara_queue_manage()
1084 async_req->complete(async_req, ret); in sahara_queue_manage()
s5p-sss.c
1369 struct crypto_async_request *async_req, *backlog; in s5p_hash_handle_queue() local
1385 async_req = crypto_dequeue_request(&dd->hash_queue); in s5p_hash_handle_queue()
1386 if (async_req) in s5p_hash_handle_queue()
1391 if (!async_req) in s5p_hash_handle_queue()
1397 req = ahash_request_cast(async_req); in s5p_hash_handle_queue()
1992 struct crypto_async_request *async_req, *backlog; in s5p_tasklet_cb() local
1998 async_req = crypto_dequeue_request(&dev->queue); in s5p_tasklet_cb()
2000 if (!async_req) { in s5p_tasklet_cb()
2010 dev->req = ablkcipher_request_cast(async_req); in s5p_tasklet_cb()
img-hash.c
500 struct crypto_async_request *async_req, *backlog; in img_hash_handle_queue() local
516 async_req = crypto_dequeue_request(&hdev->queue); in img_hash_handle_queue()
517 if (async_req) in img_hash_handle_queue()
522 if (!async_req) in img_hash_handle_queue()
528 req = ahash_request_cast(async_req); in img_hash_handle_queue()
atmel-tdes.c
591 struct crypto_async_request *async_req, *backlog; in atmel_tdes_handle_queue() local
605 async_req = crypto_dequeue_request(&dd->queue); in atmel_tdes_handle_queue()
606 if (async_req) in atmel_tdes_handle_queue()
610 if (!async_req) in atmel_tdes_handle_queue()
616 req = ablkcipher_request_cast(async_req); in atmel_tdes_handle_queue()
/Linux-v4.19/drivers/net/usb/
rtl8150.c
144 struct async_req { struct
192 struct async_req *req = (struct async_req *)urb->context; in async_set_reg_cb()
205 struct async_req *req; in async_set_registers()
207 req = kmalloc(sizeof(struct async_req), GFP_ATOMIC); in async_set_registers()
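
The rtl8150 hit is the outlier: this async_req is a driver-private struct, not the crypto base request. It packages a USB setup packet plus the value being written, so a register write can be fired asynchronously from atomic context and freed in the URB completion callback. A hedged sketch; the real struct at rtl8150.c line 144 has a different field layout, and the vendor request number below is an assumption:

    #include <linux/slab.h>
    #include <linux/usb.h>

    /* Illustrative stand-in for the driver's struct async_req (rtl8150.c:144). */
    struct async_req {
            struct usb_ctrlrequest dr;      /* setup packet; must outlive submit */
            __le16 value;                   /* register contents being written */
    };

    static void async_set_reg_cb(struct urb *urb)
    {
            struct async_req *req = (struct async_req *)urb->context;

            kfree(req);             /* allocated in async_set_registers() */
            usb_free_urb(urb);
    }

    static int async_set_registers(struct usb_device *udev, u16 indx, u16 reg)
    {
            struct async_req *req;
            struct urb *urb;
            int ret;

            req = kmalloc(sizeof(struct async_req), GFP_ATOMIC);
            if (!req)
                    return -ENOMEM;
            urb = usb_alloc_urb(0, GFP_ATOMIC);
            if (!urb) {
                    kfree(req);
                    return -ENOMEM;
            }

            req->value = cpu_to_le16(reg);
            req->dr.bRequestType = USB_DIR_OUT | USB_TYPE_VENDOR |
                                   USB_RECIP_DEVICE;
            req->dr.bRequest = 0x05;        /* hypothetical vendor request */
            req->dr.wValue = cpu_to_le16(indx);
            req->dr.wIndex = 0;
            req->dr.wLength = cpu_to_le16(sizeof(req->value));

            usb_fill_control_urb(urb, udev, usb_sndctrlpipe(udev, 0),
                                 (void *)&req->dr, &req->value,
                                 sizeof(req->value), async_set_reg_cb, req);
            ret = usb_submit_urb(urb, GFP_ATOMIC);
            if (ret) {
                    kfree(req);
                    usb_free_urb(urb);
            }
            return ret;
    }
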
/Linux-v4.19/drivers/crypto/mediatek/
mtk-sha.c
658 struct crypto_async_request *async_req, *backlog; in mtk_sha_handle_queue() local
673 async_req = crypto_dequeue_request(&sha->queue); in mtk_sha_handle_queue()
674 if (async_req) in mtk_sha_handle_queue()
678 if (!async_req) in mtk_sha_handle_queue()
684 req = ahash_request_cast(async_req); in mtk_sha_handle_queue()
/Linux-v4.19/drivers/crypto/amcc/
crypto4xx_core.h
78 struct crypto_async_request *async_req; /* base crypto request member
crypto4xx_core.c
538 req = skcipher_request_cast(pd_uinfo->async_req); in crypto4xx_cipher_done()
570 ahash_req = ahash_request_cast(pd_uinfo->async_req); in crypto4xx_ahash_done()
586 struct aead_request *aead_req = container_of(pd_uinfo->async_req, in crypto4xx_aead_done()
651 switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) { in crypto4xx_pd_done()
808 pd_uinfo->async_req = req; in crypto4xx_build_pd()
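
crypto4xx takes yet another route: the base request pointer is stashed in the per-descriptor pd_uinfo (crypto4xx_core.h line 78), and completion dispatches on crypto_tfm_alg_type() to pick the right container_of cast, as in the crypto4xx_pd_done() hit at line 651. A sketch of that dispatch under stated assumptions (the case labels are abbreviated and the struct is a toy; the real driver also unmaps DMA buffers and copies results first):

    #include <crypto/aead.h>
    #include <crypto/hash.h>
    #include <crypto/skcipher.h>

    /* Pared-down stand-in for the driver's per-descriptor pd_uinfo state. */
    struct toy_pd_uinfo {
            struct crypto_async_request *async_req;
    };

    static void toy_pd_done(struct toy_pd_uinfo *pd_uinfo, int err)
    {
            struct crypto_async_request *async_req = pd_uinfo->async_req;

            switch (crypto_tfm_alg_type(async_req->tfm)) {
            case CRYPTO_ALG_TYPE_SKCIPHER: {
                    struct skcipher_request *req =
                            skcipher_request_cast(async_req);
                    /* ... copy back the IV, unmap scatterlists ... */
                    req->base.complete(&req->base, err);
                    break;
            }
            case CRYPTO_ALG_TYPE_AHASH: {
                    struct ahash_request *req = ahash_request_cast(async_req);
                    /* ... copy the digest into req->result ... */
                    req->base.complete(&req->base, err);
                    break;
            }
            case CRYPTO_ALG_TYPE_AEAD: {
                    struct aead_request *req = container_of(async_req,
                                    struct aead_request, base);
                    /* ... check/emit the authentication tag ... */
                    req->base.complete(&req->base, err);
                    break;
            }
            }
    }
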
