/Linux-v5.10/crypto/ |
D | crypto_engine.c |
     73  struct crypto_async_request *async_req, *backlog;  in crypto_pump_requests() local
    119  async_req = crypto_dequeue_request(&engine->queue);  in crypto_pump_requests()
    120  if (!async_req)  in crypto_pump_requests()
    129  engine->cur_req = async_req;  in crypto_pump_requests()
    150  enginectx = crypto_tfm_ctx(async_req->tfm);  in crypto_pump_requests()
    153  ret = enginectx->op.prepare_request(engine, async_req);  in crypto_pump_requests()
    166  ret = enginectx->op.do_one_request(engine, async_req);  in crypto_pump_requests()
    189  async_req);  in crypto_pump_requests()
    200  crypto_enqueue_request_head(&engine->queue, async_req);  in crypto_pump_requests()
    210  ret = enginectx->op.unprepare_request(engine, async_req);  in crypto_pump_requests()
    [all …]
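The crypto_engine.c hits above are the generic request pump: crypto_pump_requests() dequeues a crypto_async_request, stores it in engine->cur_req, and drives the ops it finds at the start of the tfm context. A minimal sketch of the driver side of that contract; the names my_tfm_ctx, my_prepare_request, my_do_one_request and my_init_tfm are hypothetical, not taken from the listing:

#include <linux/kernel.h>
#include <crypto/engine.h>
#include <crypto/internal/skcipher.h>

struct my_tfm_ctx {
	struct crypto_engine_ctx enginectx;	/* must be first: crypto_pump_requests()
						 * reads it via crypto_tfm_ctx(async_req->tfm) */
	/* driver-specific key material etc. */
};

static int my_prepare_request(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = container_of(areq, struct skcipher_request, base);

	/* DMA mapping of req->src/req->dst, key programming, ... */
	pr_debug("preparing %u byte request\n", req->cryptlen);
	return 0;
}

static int my_do_one_request(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = container_of(areq, struct skcipher_request, base);

	/*
	 * A real driver starts the hardware here and calls
	 * crypto_finalize_skcipher_request() from its interrupt handler;
	 * finalizing synchronously, as below, is also permitted.
	 */
	crypto_finalize_skcipher_request(engine, req, 0);
	return 0;
}

static int my_init_tfm(struct crypto_skcipher *tfm)
{
	struct my_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->enginectx.op.prepare_request = my_prepare_request;
	ctx->enginectx.op.do_one_request = my_do_one_request;
	ctx->enginectx.op.unprepare_request = NULL;	/* optional hook */
	return 0;
}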
|
D | rsa-pkcs1pad.c |
    217  struct crypto_async_request async_req;  in pkcs1pad_encrypt_sign_complete_cb() local
    222  async_req.data = req->base.data;  in pkcs1pad_encrypt_sign_complete_cb()
    223  async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req));  in pkcs1pad_encrypt_sign_complete_cb()
    224  async_req.flags = child_async_req->flags;  in pkcs1pad_encrypt_sign_complete_cb()
    225  req->base.complete(&async_req,  in pkcs1pad_encrypt_sign_complete_cb()
    335  struct crypto_async_request async_req;  in pkcs1pad_decrypt_complete_cb() local
    340  async_req.data = req->base.data;  in pkcs1pad_decrypt_complete_cb()
    341  async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req));  in pkcs1pad_decrypt_complete_cb()
    342  async_req.flags = child_async_req->flags;  in pkcs1pad_decrypt_complete_cb()
    343  req->base.complete(&async_req, pkcs1pad_decrypt_complete(req, err));  in pkcs1pad_decrypt_complete_cb()
    [all …]
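In rsa-pkcs1pad.c the identifier is a stack temporary: the inner (child) akcipher request completes with its own tfm, so the template rebuilds a crypto_async_request carrying the caller's data and tfm before invoking the original completion. A sketch of the same trick, with the hypothetical name my_wrapper_done() standing in for the pkcs1pad callbacks (v5.10 completion signature assumed):

#include <crypto/akcipher.h>

static void my_wrapper_done(struct crypto_async_request *child_async_req, int err)
{
	struct akcipher_request *req = child_async_req->data;
	struct crypto_async_request async_req;

	if (err == -EINPROGRESS)
		return;

	/* Rebuild an async_req that points at the caller's tfm, not the child's. */
	async_req.data = req->base.data;
	async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req));
	async_req.flags = child_async_req->flags;

	/* Report completion to the original submitter. */
	req->base.complete(&async_req, err);
}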
|
/Linux-v5.10/drivers/crypto/qce/ |
D | core.c |
     60  static int qce_handle_request(struct crypto_async_request *async_req)  in qce_handle_request() argument
     64  u32 type = crypto_tfm_alg_type(async_req->tfm);  in qce_handle_request()
     70  ret = ops->async_req_handle(async_req);  in qce_handle_request()
     80  struct crypto_async_request *async_req, *backlog;  in qce_handle_queue() local
     96  async_req = crypto_dequeue_request(&qce->queue);  in qce_handle_queue()
     97  if (async_req)  in qce_handle_queue()
     98  qce->req = async_req;  in qce_handle_queue()
    102  if (!async_req)  in qce_handle_queue()
    111  err = qce_handle_request(async_req);  in qce_handle_queue()
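qce_handle_queue() above is one instance of the queue-pump pattern that recurs in most of the drivers below (s5p-sss, img-hash, atmel-tdes, mtk-sha): dequeue under the driver lock, notify a backlogged request with -EINPROGRESS, then dispatch by crypto_tfm_alg_type(). A generic sketch under hypothetical names (struct my_dev, my_dispatch, my_handle_queue):

#include <linux/spinlock.h>
#include <crypto/algapi.h>

struct my_dev {
	spinlock_t lock;
	struct crypto_queue queue;
	struct crypto_async_request *req;	/* request currently on the hardware */
};

/* Hypothetical dispatch: pick the hardware path from the algorithm type. */
static int my_dispatch(struct my_dev *dev, struct crypto_async_request *areq)
{
	switch (crypto_tfm_alg_type(areq->tfm)) {
	case CRYPTO_ALG_TYPE_AHASH:
		return -ENOSYS;		/* hash path would be driven here */
	default:
		return -ENOSYS;		/* cipher path would be driven here */
	}
}

static int my_handle_queue(struct my_dev *dev, struct crypto_async_request *new_req)
{
	struct crypto_async_request *async_req, *backlog;
	unsigned long flags;
	int ret = 0, err;

	spin_lock_irqsave(&dev->lock, flags);
	if (new_req)
		ret = crypto_enqueue_request(&dev->queue, new_req);
	if (dev->req) {				/* hardware busy, leave it queued */
		spin_unlock_irqrestore(&dev->lock, flags);
		return ret;
	}
	backlog = crypto_get_backlog(&dev->queue);
	async_req = crypto_dequeue_request(&dev->queue);
	if (async_req)
		dev->req = async_req;
	spin_unlock_irqrestore(&dev->lock, flags);

	if (!async_req)
		return ret;

	/* Tell a backlogged submitter its request is now being processed. */
	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	err = my_dispatch(dev, async_req);
	if (err)
		async_req->complete(async_req, err);
	return ret;
}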
|
D | common.c |
    142  static int qce_setup_regs_ahash(struct crypto_async_request *async_req,  in qce_setup_regs_ahash() argument
    145  struct ahash_request *req = ahash_request_cast(async_req);  in qce_setup_regs_ahash()
    146  struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm);  in qce_setup_regs_ahash()
    148  struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);  in qce_setup_regs_ahash()
    151  unsigned int blocksize = crypto_tfm_alg_blocksize(async_req->tfm);  in qce_setup_regs_ahash()
    308  static int qce_setup_regs_skcipher(struct crypto_async_request *async_req,  in qce_setup_regs_skcipher() argument
    311  struct skcipher_request *req = skcipher_request_cast(async_req);  in qce_setup_regs_skcipher()
    313  struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm);  in qce_setup_regs_skcipher()
    390  int qce_start(struct crypto_async_request *async_req, u32 type, u32 totallen,  in qce_start() argument
    396  return qce_setup_regs_skcipher(async_req, totallen, offset);  in qce_start()
    [all …]
|
D | sha.c |
     32  struct crypto_async_request *async_req = data;  in qce_ahash_done() local
     33  struct ahash_request *req = ahash_request_cast(async_req);  in qce_ahash_done()
     36  struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);  in qce_ahash_done()
     69  static int qce_ahash_async_req_handle(struct crypto_async_request *async_req)  in qce_ahash_async_req_handle() argument
     71  struct ahash_request *req = ahash_request_cast(async_req);  in qce_ahash_async_req_handle()
     73  struct qce_sha_ctx *ctx = crypto_tfm_ctx(async_req->tfm);  in qce_ahash_async_req_handle()
     74  struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);  in qce_ahash_async_req_handle()
    104  &rctx->result_sg, 1, qce_ahash_done, async_req);  in qce_ahash_async_req_handle()
    110  ret = qce_start(async_req, tmpl->crypto_alg_type, 0, 0);  in qce_ahash_async_req_handle()
|
D | skcipher.c |
     28  struct crypto_async_request *async_req = data;  in qce_skcipher_done() local
     29  struct skcipher_request *req = skcipher_request_cast(async_req);  in qce_skcipher_done()
     63  qce_skcipher_async_req_handle(struct crypto_async_request *async_req)  in qce_skcipher_async_req_handle() argument
     65  struct skcipher_request *req = skcipher_request_cast(async_req);  in qce_skcipher_async_req_handle()
    140  qce_skcipher_done, async_req);  in qce_skcipher_async_req_handle()
    146  ret = qce_start(async_req, tmpl->crypto_alg_type, req->cryptlen, 0);  in qce_skcipher_async_req_handle()
|
D | core.h | 57 int (*async_req_handle)(struct crypto_async_request *async_req);
|
D | common.h | 97 int qce_start(struct crypto_async_request *async_req, u32 type, u32 totallen,
|
/Linux-v5.10/drivers/crypto/rockchip/ |
D | rk3288_crypto.c |
    186  struct crypto_async_request *async_req)  in rk_crypto_enqueue() argument
    192  ret = crypto_enqueue_request(&dev->queue, async_req);  in rk_crypto_enqueue()
    207  struct crypto_async_request *async_req, *backlog;  in rk_crypto_queue_task_cb() local
    214  async_req = crypto_dequeue_request(&dev->queue);  in rk_crypto_queue_task_cb()
    216  if (!async_req) {  in rk_crypto_queue_task_cb()
    228  dev->async_req = async_req;  in rk_crypto_queue_task_cb()
    231  dev->complete(dev->async_req, err);  in rk_crypto_queue_task_cb()
    239  dev->complete(dev->async_req, dev->err);  in rk_crypto_done_task_cb()
    245  dev->complete(dev->async_req, dev->err);  in rk_crypto_done_task_cb()
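The Rockchip driver above takes a slightly different route from the qce-style pump: the device structure keeps both the in-flight request (dev->async_req) and a per-algorithm completion hook (dev->complete), so a single "done" tasklet can finish whichever request type is active. A minimal sketch of that indirection under hypothetical names (struct my_crypto_dev, my_done_task_cb); the tasklet_init() wiring is omitted:

#include <linux/interrupt.h>
#include <crypto/algapi.h>

struct my_crypto_dev {
	struct crypto_queue queue;
	struct tasklet_struct done_task;
	struct crypto_async_request *async_req;	/* request the hardware is working on */
	int err;				/* status gathered by the interrupt handler */
	/* installed when the request is started, e.g. an ahash- or
	 * skcipher-specific finishing routine */
	void (*complete)(struct crypto_async_request *base, int err);
};

static void my_done_task_cb(unsigned long data)
{
	struct my_crypto_dev *dev = (struct my_crypto_dev *)data;

	/* If more scatterlist entries remained, the next DMA chunk would be
	 * started here instead of completing. */
	dev->complete(dev->async_req, dev->err);
}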
|
D | rk3288_crypto_skcipher.c |
    199  skcipher_request_cast(dev->async_req);  in rk_ablk_hw_init()
    247  skcipher_request_cast(dev->async_req);  in rk_set_data_start()
    272  skcipher_request_cast(dev->async_req);  in rk_ablk_start()
    295  skcipher_request_cast(dev->async_req);  in rk_iv_copyback()
    315  skcipher_request_cast(dev->async_req);  in rk_update_iv()
    342  skcipher_request_cast(dev->async_req);  in rk_ablk_rx()
    370  dev->complete(dev->async_req, 0);  in rk_ablk_rx()
|
D | rk3288_crypto_ahash.c |
     49  struct ahash_request *req = ahash_request_cast(dev->async_req);  in rk_ahash_reg_init()
    196  struct ahash_request *req = ahash_request_cast(dev->async_req);  in rk_ahash_start()
    233  struct ahash_request *req = ahash_request_cast(dev->async_req);  in rk_ahash_crypto_rx()
    265  dev->complete(dev->async_req, 0);  in rk_ahash_crypto_rx()
|
D | rk3288_crypto.h |
    198  struct crypto_async_request *async_req;  member
    229  struct crypto_async_request *async_req);
|
/Linux-v5.10/drivers/crypto/ccp/ |
D | ccp-crypto-aes.c |
     22  static int ccp_aes_complete(struct crypto_async_request *async_req, int ret)  in ccp_aes_complete() argument
     24  struct skcipher_request *req = skcipher_request_cast(async_req);  in ccp_aes_complete()
    134  static int ccp_aes_rfc3686_complete(struct crypto_async_request *async_req,  in ccp_aes_rfc3686_complete() argument
    137  struct skcipher_request *req = skcipher_request_cast(async_req);  in ccp_aes_rfc3686_complete()
    143  return ccp_aes_complete(async_req, ret);  in ccp_aes_rfc3686_complete()
|
D | ccp-crypto-des3.c |
     21  static int ccp_des3_complete(struct crypto_async_request *async_req, int ret)  in ccp_des3_complete() argument
     23  struct skcipher_request *req = skcipher_request_cast(async_req);  in ccp_des3_complete()
|
D | ccp-crypto-aes-xts.c |
     62  static int ccp_aes_xts_complete(struct crypto_async_request *async_req, int ret)  in ccp_aes_xts_complete() argument
     64  struct skcipher_request *req = skcipher_request_cast(async_req);  in ccp_aes_xts_complete()
|
D | ccp-crypto-rsa.c |
     44  static int ccp_rsa_complete(struct crypto_async_request *async_req, int ret)  in ccp_rsa_complete() argument
     46  struct akcipher_request *req = akcipher_request_cast(async_req);  in ccp_rsa_complete()
|
D | ccp-crypto-aes-cmac.c |
     23  static int ccp_aes_cmac_complete(struct crypto_async_request *async_req,  in ccp_aes_cmac_complete() argument
     26  struct ahash_request *req = ahash_request_cast(async_req);  in ccp_aes_cmac_complete()
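All of the CCP callbacks above share one shape: the generic crypto_async_request handed back by the command layer is cast to the concrete request type (skcipher_request_cast(), ahash_request_cast(), akcipher_request_cast()) before the result is fixed up. A minimal sketch of such a completion callback; struct my_req_ctx and its next_iv field are hypothetical stand-ins for the driver's real request context:

#include <linux/string.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

struct my_req_ctx {
	u8 next_iv[AES_BLOCK_SIZE];	/* IV produced by the hardware for chaining */
};

static int my_aes_complete(struct crypto_async_request *async_req, int ret)
{
	struct skcipher_request *req = skcipher_request_cast(async_req);
	struct my_req_ctx *rctx = skcipher_request_ctx(req);

	if (ret)
		return ret;

	/* Hand the output IV back through the request, as chaining modes expect. */
	memcpy(req->iv, rctx->next_iv, AES_BLOCK_SIZE);
	return 0;
}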
|
/Linux-v5.10/drivers/crypto/allwinner/sun8i-ce/ |
D | sun8i-ce-cipher.c |
     78  static int sun8i_ce_cipher_prepare(struct crypto_engine *engine, void *async_req)  in sun8i_ce_cipher_prepare() argument
     80  struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);  in sun8i_ce_cipher_prepare()
    287  static int sun8i_ce_cipher_unprepare(struct crypto_engine *engine, void *async_req)  in sun8i_ce_cipher_unprepare() argument
    289  struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);  in sun8i_ce_cipher_unprepare()
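sun8i-ce-cipher.c shows the other half of the crypto_engine contract sketched earlier: the prepare/unprepare callbacks receive a void *async_req and recover the skcipher_request with container_of(), while the algorithm's entry point only hands the request to the engine. A sketch of such an entry point; keeping the engine pointer in a hypothetical my_tfm_ctx is an assumption, real drivers usually reach it through their device structure:

#include <crypto/engine.h>
#include <crypto/internal/skcipher.h>

struct my_tfm_ctx {
	struct crypto_engine_ctx enginectx;	/* must stay first, as above */
	struct crypto_engine *engine;		/* looked up at tfm-init time */
};

static int my_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct my_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	/*
	 * No work happens here; the engine dequeues the request later, calls
	 * back through ctx->enginectx.op, and the final status is delivered
	 * by crypto_finalize_skcipher_request().
	 */
	return crypto_transfer_skcipher_request_to_engine(ctx->engine, req);
}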
|
/Linux-v5.10/drivers/crypto/ |
D | sahara.c |
   1038  struct crypto_async_request *async_req;  in sahara_queue_manage() local
   1047  async_req = crypto_dequeue_request(&dev->queue);  in sahara_queue_manage()
   1053  if (async_req) {  in sahara_queue_manage()
   1054  if (crypto_tfm_alg_type(async_req->tfm) ==  in sahara_queue_manage()
   1057  ahash_request_cast(async_req);  in sahara_queue_manage()
   1062  skcipher_request_cast(async_req);  in sahara_queue_manage()
   1067  async_req->complete(async_req, ret);  in sahara_queue_manage()
|
D | s5p-sss.c |
   1376  struct crypto_async_request *async_req, *backlog;  in s5p_hash_handle_queue() local
   1392  async_req = crypto_dequeue_request(&dd->hash_queue);  in s5p_hash_handle_queue()
   1393  if (async_req)  in s5p_hash_handle_queue()
   1398  if (!async_req)  in s5p_hash_handle_queue()
   1404  req = ahash_request_cast(async_req);  in s5p_hash_handle_queue()
   1980  struct crypto_async_request *async_req, *backlog;  in s5p_tasklet_cb() local
   1986  async_req = crypto_dequeue_request(&dev->queue);  in s5p_tasklet_cb()
   1988  if (!async_req) {  in s5p_tasklet_cb()
   1998  dev->req = skcipher_request_cast(async_req);  in s5p_tasklet_cb()
|
D | img-hash.c |
    498  struct crypto_async_request *async_req, *backlog;  in img_hash_handle_queue() local
    514  async_req = crypto_dequeue_request(&hdev->queue);  in img_hash_handle_queue()
    515  if (async_req)  in img_hash_handle_queue()
    520  if (!async_req)  in img_hash_handle_queue()
    526  req = ahash_request_cast(async_req);  in img_hash_handle_queue()
|
D | atmel-tdes.c |
    609  struct crypto_async_request *async_req, *backlog;  in atmel_tdes_handle_queue() local
    623  async_req = crypto_dequeue_request(&dd->queue);  in atmel_tdes_handle_queue()
    624  if (async_req)  in atmel_tdes_handle_queue()
    628  if (!async_req)  in atmel_tdes_handle_queue()
    634  req = skcipher_request_cast(async_req);  in atmel_tdes_handle_queue()
|
/Linux-v5.10/drivers/net/usb/ |
D | rtl8150.c |
    141  struct async_req {  struct
    169  struct async_req *req = (struct async_req *)urb->context;  in async_set_reg_cb()
    182  struct async_req *req;  in async_set_registers()
    184  req = kmalloc(sizeof(struct async_req), GFP_ATOMIC);  in async_set_registers()
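The rtl8150.c entry is unrelated to the crypto API: there async_req is a small driver-private context (setup packet plus payload) for a fire-and-forget USB control write, used where usb_control_msg() cannot be called because the caller may not sleep. A sketch of the pattern; the vendor request number 0x05 and the single-register payload are made-up placeholders, not the rtl8150's actual values:

#include <linux/slab.h>
#include <linux/usb.h>

struct my_async_req {
	struct usb_ctrlrequest dr;	/* setup packet, must outlive the URB */
	__le16 value;			/* payload written to the device */
};

static void my_async_write_cb(struct urb *urb)
{
	struct my_async_req *req = urb->context;

	/* Nothing to hand back to the submitter: just release everything. */
	kfree(req);
	usb_free_urb(urb);
}

static int my_async_write_reg(struct usb_device *udev, u16 index, u16 val)
{
	struct my_async_req *req;
	struct urb *urb;
	int ret;

	req = kmalloc(sizeof(*req), GFP_ATOMIC);
	if (!req)
		return -ENOMEM;
	urb = usb_alloc_urb(0, GFP_ATOMIC);
	if (!urb) {
		kfree(req);
		return -ENOMEM;
	}

	req->value = cpu_to_le16(val);
	req->dr.bRequestType = USB_DIR_OUT | USB_TYPE_VENDOR | USB_RECIP_DEVICE;
	req->dr.bRequest = 0x05;		/* hypothetical vendor "set registers" request */
	req->dr.wValue = cpu_to_le16(index);
	req->dr.wIndex = 0;
	req->dr.wLength = cpu_to_le16(sizeof(req->value));

	usb_fill_control_urb(urb, udev, usb_sndctrlpipe(udev, 0),
			     (unsigned char *)&req->dr, &req->value,
			     sizeof(req->value), my_async_write_cb, req);
	ret = usb_submit_urb(urb, GFP_ATOMIC);
	if (ret) {
		kfree(req);
		usb_free_urb(urb);
	}
	return ret;
}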
|
/Linux-v5.10/drivers/nvme/host/ |
D | tcp.c |
    129  struct nvme_tcp_request async_req;  member
    176  return req == &req->queue->ctrl->async_req;  in nvme_tcp_async_req()
   1178  struct nvme_tcp_request *async = &ctrl->async_req;  in nvme_tcp_free_async_req()
   1186  struct nvme_tcp_request *async = &ctrl->async_req;  in nvme_tcp_alloc_async_req()
   1596  if (to_tcp_ctrl(ctrl)->async_req.pdu) {  in nvme_tcp_free_admin_queue()
   1599  to_tcp_ctrl(ctrl)->async_req.pdu = NULL;  in nvme_tcp_free_admin_queue()
   2141  struct nvme_tcp_cmd_pdu *pdu = ctrl->async_req.pdu;  in nvme_tcp_submit_async_event()
   2157  ctrl->async_req.state = NVME_TCP_SEND_CMD_PDU;  in nvme_tcp_submit_async_event()
   2158  ctrl->async_req.offset = 0;  in nvme_tcp_submit_async_event()
   2159  ctrl->async_req.curr_bio = NULL;  in nvme_tcp_submit_async_event()
   [all …]
|
/Linux-v5.10/drivers/crypto/mediatek/ |
D | mtk-sha.c |
    654  struct crypto_async_request *async_req, *backlog;  in mtk_sha_handle_queue() local
    669  async_req = crypto_dequeue_request(&sha->queue);  in mtk_sha_handle_queue()
    670  if (async_req)  in mtk_sha_handle_queue()
    674  if (!async_req)  in mtk_sha_handle_queue()
    680  req = ahash_request_cast(async_req);  in mtk_sha_handle_queue()
|