/Linux-v4.19/drivers/crypto/qce/ |
D | core.c |
    63  static int qce_handle_request(struct crypto_async_request *async_req)  in qce_handle_request() argument
    67  u32 type = crypto_tfm_alg_type(async_req->tfm);  in qce_handle_request()
    73  ret = ops->async_req_handle(async_req);  in qce_handle_request()
    83  struct crypto_async_request *async_req, *backlog;  in qce_handle_queue() local
    99  async_req = crypto_dequeue_request(&qce->queue);  in qce_handle_queue()
   100  if (async_req)  in qce_handle_queue()
   101  qce->req = async_req;  in qce_handle_queue()
   105  if (!async_req)  in qce_handle_queue()
   114  err = qce_handle_request(async_req);  in qce_handle_queue()
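The core.c hits above show the self-throttling queue pattern that recurs throughout this list: enqueue under a lock, dequeue one request at a time, and notify a backlogged submitter with -EINPROGRESS before dispatching. Below is a minimal sketch of that pattern, assuming a hypothetical `struct my_dev` and `my_handle_request()` dispatch; the qce driver's own state differs in detail.

```c
#include <crypto/algapi.h>	/* crypto_queue, crypto_{en,de}queue_request */
#include <linux/spinlock.h>

struct my_dev {				/* hypothetical driver state */
	struct crypto_queue queue;
	spinlock_t lock;
	struct crypto_async_request *req;	/* request in flight */
};

static int my_handle_request(struct crypto_async_request *async_req);	/* hw dispatch, assumed */

static int my_handle_queue(struct my_dev *dev, struct crypto_async_request *req)
{
	struct crypto_async_request *async_req, *backlog;
	unsigned long flags;
	int ret = 0, err;

	spin_lock_irqsave(&dev->lock, flags);

	/* A new submission is queued first; may return -EBUSY or -EINPROGRESS. */
	if (req)
		ret = crypto_enqueue_request(&dev->queue, req);

	/* Only one request in flight at a time. */
	if (dev->req) {
		spin_unlock_irqrestore(&dev->lock, flags);
		return ret;
	}

	backlog = crypto_get_backlog(&dev->queue);
	async_req = crypto_dequeue_request(&dev->queue);
	if (async_req)
		dev->req = async_req;

	spin_unlock_irqrestore(&dev->lock, flags);

	if (!async_req)
		return ret;

	/* Tell a backlogged submitter its request has now been accepted. */
	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	err = my_handle_request(async_req);
	if (err)
		async_req->complete(async_req, err);

	return ret;
}
```

The same shape recurs further down in s5p-sss.c, img-hash.c, atmel-tdes.c and mtk-sha.c.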
|
D | common.c |
   230  static int qce_setup_regs_ahash(struct crypto_async_request *async_req,  in qce_setup_regs_ahash() argument
   233  struct ahash_request *req = ahash_request_cast(async_req);  in qce_setup_regs_ahash()
   234  struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm);  in qce_setup_regs_ahash()
   236  struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);  in qce_setup_regs_ahash()
   239  unsigned int blocksize = crypto_tfm_alg_blocksize(async_req->tfm);  in qce_setup_regs_ahash()
   315  static int qce_setup_regs_ablkcipher(struct crypto_async_request *async_req,  in qce_setup_regs_ablkcipher() argument
   318  struct ablkcipher_request *req = ablkcipher_request_cast(async_req);  in qce_setup_regs_ablkcipher()
   320  struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm);  in qce_setup_regs_ablkcipher()
   321  struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm);  in qce_setup_regs_ablkcipher()
   396  int qce_start(struct crypto_async_request *async_req, u32 type, u32 totallen,  in qce_start() argument
   [all …]
|
D | ablkcipher.c |
    27  struct crypto_async_request *async_req = data;  in qce_ablkcipher_done() local
    28  struct ablkcipher_request *req = ablkcipher_request_cast(async_req);  in qce_ablkcipher_done()
    30  struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm);  in qce_ablkcipher_done()
    60  qce_ablkcipher_async_req_handle(struct crypto_async_request *async_req)  in qce_ablkcipher_async_req_handle() argument
    62  struct ablkcipher_request *req = ablkcipher_request_cast(async_req);  in qce_ablkcipher_async_req_handle()
    65  struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm);  in qce_ablkcipher_async_req_handle()
   136  qce_ablkcipher_done, async_req);  in qce_ablkcipher_async_req_handle()
   142  ret = qce_start(async_req, tmpl->crypto_alg_type, req->nbytes, 0);  in qce_ablkcipher_async_req_handle()
|
D | sha.c |
    39  struct crypto_async_request *async_req = data;  in qce_ahash_done() local
    40  struct ahash_request *req = ahash_request_cast(async_req);  in qce_ahash_done()
    43  struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);  in qce_ahash_done()
    76  static int qce_ahash_async_req_handle(struct crypto_async_request *async_req)  in qce_ahash_async_req_handle() argument
    78  struct ahash_request *req = ahash_request_cast(async_req);  in qce_ahash_async_req_handle()
    80  struct qce_sha_ctx *ctx = crypto_tfm_ctx(async_req->tfm);  in qce_ahash_async_req_handle()
    81  struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);  in qce_ahash_async_req_handle()
   111  &rctx->result_sg, 1, qce_ahash_done, async_req);  in qce_ahash_async_req_handle()
   117  ret = qce_start(async_req, tmpl->crypto_alg_type, 0, 0);  in qce_ahash_async_req_handle()
|
D | core.h |
    65  int (*async_req_handle)(struct crypto_async_request *async_req);
|
D | common.h |
    99  int qce_start(struct crypto_async_request *async_req, u32 type, u32 totallen,
|
/Linux-v4.19/crypto/ |
D | rsa-pkcs1pad.c |
   219  struct crypto_async_request async_req;  in pkcs1pad_encrypt_sign_complete_cb() local
   224  async_req.data = req->base.data;  in pkcs1pad_encrypt_sign_complete_cb()
   225  async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req));  in pkcs1pad_encrypt_sign_complete_cb()
   226  async_req.flags = child_async_req->flags;  in pkcs1pad_encrypt_sign_complete_cb()
   227  req->base.complete(&async_req,  in pkcs1pad_encrypt_sign_complete_cb()
   346  struct crypto_async_request async_req;  in pkcs1pad_decrypt_complete_cb() local
   351  async_req.data = req->base.data;  in pkcs1pad_decrypt_complete_cb()
   352  async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req));  in pkcs1pad_decrypt_complete_cb()
   353  async_req.flags = child_async_req->flags;  in pkcs1pad_decrypt_complete_cb()
   354  req->base.complete(&async_req, pkcs1pad_decrypt_complete(req, err));  in pkcs1pad_decrypt_complete_cb()
   [all …]
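pkcs1pad is the one caller here that builds a temporary crypto_async_request on the stack: when the child akcipher request completes, the parent's completion must fire with the parent's tfm and callback data rather than the child's. A sketch of that bridge, reduced to its essentials; the function name is hypothetical, the field assignments mirror lines 224-227 above.

```c
#include <crypto/akcipher.h>

/* Child-request completion: re-point the notification at the parent. */
static void my_child_complete_cb(struct crypto_async_request *child_async_req,
				 int err)
{
	struct akcipher_request *req = child_async_req->data;	/* parent req */
	struct crypto_async_request async_req;

	if (err == -EINPROGRESS)
		return;		/* backlog notification, not a completion */

	async_req.data = req->base.data;
	async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req));
	async_req.flags = child_async_req->flags;
	req->base.complete(&async_req, err);
}
```

The on-stack request is safe here because `complete()` runs synchronously and nothing retains the pointer afterwards.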
|
D | crypto_engine.c |
    73  struct crypto_async_request *async_req, *backlog;  in crypto_pump_requests() local
   118  async_req = crypto_dequeue_request(&engine->queue);  in crypto_pump_requests()
   119  if (!async_req)  in crypto_pump_requests()
   122  engine->cur_req = async_req;  in crypto_pump_requests()
   142  enginectx = crypto_tfm_ctx(async_req->tfm);  in crypto_pump_requests()
   145  ret = enginectx->op.prepare_request(engine, async_req);  in crypto_pump_requests()
   158  ret = enginectx->op.do_one_request(engine, async_req);  in crypto_pump_requests()
   166  crypto_finalize_request(engine, async_req, ret);  in crypto_pump_requests()
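crypto_pump_requests() is the generic version of the per-driver loops above: it reads crypto_tfm_ctx(async_req->tfm) as a struct crypto_engine_ctx and invokes the driver's prepare/do-one hooks. A sketch of how a driver wires itself in, assuming hypothetical my_* callbacks against the v4.19 <crypto/engine.h> API:

```c
#include <crypto/engine.h>

struct my_tfm_ctx {
	struct crypto_engine_ctx enginectx;	/* must come first: the engine
						 * fetches it via crypto_tfm_ctx() */
	/* ... driver key material / state ... */
};

static int my_prepare_request(struct crypto_engine *engine, void *areq)
{
	/* map buffers, program descriptors, ... */
	return 0;
}

static int my_do_one_request(struct crypto_engine *engine, void *areq)
{
	/* start the hardware; the result is reported later from the IRQ
	 * path via crypto_finalize_*_request() */
	return 0;
}

static int my_cra_init(struct crypto_tfm *tfm)
{
	struct my_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->enginectx.op.prepare_request = my_prepare_request;
	ctx->enginectx.op.unprepare_request = NULL;
	ctx->enginectx.op.do_one_request = my_do_one_request;
	return 0;
}
```

Placing crypto_engine_ctx at the start of the tfm context is what makes the cast at line 142 above valid.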
|
/Linux-v4.19/drivers/crypto/rockchip/ |
D | rk3288_crypto.c |
   188  struct crypto_async_request *async_req)  in rk_crypto_enqueue() argument
   194  ret = crypto_enqueue_request(&dev->queue, async_req);  in rk_crypto_enqueue()
   209  struct crypto_async_request *async_req, *backlog;  in rk_crypto_queue_task_cb() local
   216  async_req = crypto_dequeue_request(&dev->queue);  in rk_crypto_queue_task_cb()
   218  if (!async_req) {  in rk_crypto_queue_task_cb()
   230  dev->async_req = async_req;  in rk_crypto_queue_task_cb()
   233  dev->complete(dev->async_req, err);  in rk_crypto_queue_task_cb()
   241  dev->complete(dev->async_req, dev->err);  in rk_crypto_done_task_cb()
   247  dev->complete(dev->async_req, dev->err);  in rk_crypto_done_task_cb()
|
D | rk3288_crypto_ablkcipher.c |
   198  ablkcipher_request_cast(dev->async_req);  in rk_ablk_hw_init()
   255  ablkcipher_request_cast(dev->async_req);  in rk_ablk_start()
   277  ablkcipher_request_cast(dev->async_req);  in rk_iv_copyback()
   296  ablkcipher_request_cast(dev->async_req);  in rk_ablk_rx()
   323  dev->complete(dev->async_req, 0);  in rk_ablk_rx()
|
D | rk3288_crypto_ahash.c |
    51  struct ahash_request *req = ahash_request_cast(dev->async_req);  in rk_ahash_reg_init()
   198  struct ahash_request *req = ahash_request_cast(dev->async_req);  in rk_ahash_start()
   235  struct ahash_request *req = ahash_request_cast(dev->async_req);  in rk_ahash_crypto_rx()
   267  dev->complete(dev->async_req, 0);  in rk_ahash_crypto_rx()
|
D | rk3288_crypto.h |
   196  struct crypto_async_request *async_req;  member
   226  struct crypto_async_request *async_req);
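The three rk3288 entries hinge on the member declared here: the device struct keeps exactly one in-flight base request plus a per-type completion hook, and both tasklets finish work through them. Roughly, with hypothetical my_* names and the struct reduced to the shape visible above:

```c
#include <crypto/algapi.h>

struct my_crypto_dev {				/* cf. rk3288_crypto.h line 196 */
	struct crypto_queue queue;
	struct crypto_async_request *async_req;	/* single request in flight */
	int err;
	/* installed per request type by the ablkcipher/ahash start paths */
	void (*complete)(struct crypto_async_request *base, int err);
};

static void my_done_task_cb(unsigned long data)
{
	struct my_crypto_dev *dev = (struct my_crypto_dev *)data;

	/* hardware finished: hand the stashed request back to its owner */
	dev->complete(dev->async_req, dev->err);
}
```

Keeping only the base pointer is what forces the repeated ablkcipher_request_cast()/ahash_request_cast() calls in the two files above.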
|
/Linux-v4.19/drivers/crypto/ccp/ |
D | ccp-crypto-aes.c |
    25  static int ccp_aes_complete(struct crypto_async_request *async_req, int ret)  in ccp_aes_complete() argument
    27  struct ablkcipher_request *req = ablkcipher_request_cast(async_req);  in ccp_aes_complete()
   143  static int ccp_aes_rfc3686_complete(struct crypto_async_request *async_req,  in ccp_aes_rfc3686_complete() argument
   146  struct ablkcipher_request *req = ablkcipher_request_cast(async_req);  in ccp_aes_rfc3686_complete()
   152  return ccp_aes_complete(async_req, ret);  in ccp_aes_rfc3686_complete()
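All five ccp files below share one callback shape: the completion receives the type-erased base request, casts it back with the matching *_request_cast(), fixes up per-mode state, and mode variants chain to the base handler (ccp_aes_rfc3686_complete calls ccp_aes_complete above). A sketch, with a hypothetical request context and IV copy-back standing in for the per-mode fixup:

```c
#include <crypto/aes.h>
#include <linux/crypto.h>
#include <linux/string.h>

struct my_aes_req_ctx {			/* hypothetical per-request context */
	u8 iv[AES_BLOCK_SIZE];
	bool iv_valid;
};

static int my_aes_complete(struct crypto_async_request *async_req, int ret)
{
	struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
	struct my_aes_req_ctx *rctx = ablkcipher_request_ctx(req);

	if (ret)
		return ret;
	if (rctx->iv_valid)	/* propagate the output IV to the caller */
		memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);
	return 0;
}

static int my_rfc3686_complete(struct crypto_async_request *async_req, int ret)
{
	/* undo the rfc3686 counter-block rewrite here, then chain */
	return my_aes_complete(async_req, ret);
}
```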
|
D | ccp-crypto-des3.c |
    24  static int ccp_des3_complete(struct crypto_async_request *async_req, int ret)  in ccp_des3_complete() argument
    26  struct ablkcipher_request *req = ablkcipher_request_cast(async_req);  in ccp_des3_complete()
|
D | ccp-crypto-aes-xts.c |
    65  static int ccp_aes_xts_complete(struct crypto_async_request *async_req, int ret)  in ccp_aes_xts_complete() argument
    67  struct ablkcipher_request *req = ablkcipher_request_cast(async_req);  in ccp_aes_xts_complete()
|
D | ccp-crypto-rsa.c |
    48  static int ccp_rsa_complete(struct crypto_async_request *async_req, int ret)  in ccp_rsa_complete() argument
    50  struct akcipher_request *req = akcipher_request_cast(async_req);  in ccp_rsa_complete()
|
D | ccp-crypto-aes-cmac.c |
    26  static int ccp_aes_cmac_complete(struct crypto_async_request *async_req,  in ccp_aes_cmac_complete() argument
    29  struct ahash_request *req = ahash_request_cast(async_req);  in ccp_aes_cmac_complete()
|
/Linux-v4.19/drivers/crypto/ |
D | sahara.c |
  1055  struct crypto_async_request *async_req;  in sahara_queue_manage() local
  1064  async_req = crypto_dequeue_request(&dev->queue);  in sahara_queue_manage()
  1070  if (async_req) {  in sahara_queue_manage()
  1071  if (crypto_tfm_alg_type(async_req->tfm) ==  in sahara_queue_manage()
  1074  ahash_request_cast(async_req);  in sahara_queue_manage()
  1079  ablkcipher_request_cast(async_req);  in sahara_queue_manage()
  1084  async_req->complete(async_req, ret);  in sahara_queue_manage()
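sahara drains a single queue that carries both hash and cipher work, so the worker checks crypto_tfm_alg_type() before casting and then completes the request itself. The dispatch, sketched with hypothetical my_* processors:

```c
#include <crypto/internal/hash.h>	/* ahash_request_cast() */
#include <linux/crypto.h>

static int my_hash_process(struct ahash_request *req);		/* assumed */
static int my_cipher_process(struct ablkcipher_request *req);	/* assumed */

static void my_dispatch_one(struct crypto_async_request *async_req)
{
	int ret;

	if (crypto_tfm_alg_type(async_req->tfm) == CRYPTO_ALG_TYPE_AHASH)
		ret = my_hash_process(ahash_request_cast(async_req));
	else
		ret = my_cipher_process(ablkcipher_request_cast(async_req));

	/* the worker thread, not a hardware IRQ, reports completion here */
	async_req->complete(async_req, ret);
}
```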
|
D | s5p-sss.c |
  1369  struct crypto_async_request *async_req, *backlog;  in s5p_hash_handle_queue() local
  1385  async_req = crypto_dequeue_request(&dd->hash_queue);  in s5p_hash_handle_queue()
  1386  if (async_req)  in s5p_hash_handle_queue()
  1391  if (!async_req)  in s5p_hash_handle_queue()
  1397  req = ahash_request_cast(async_req);  in s5p_hash_handle_queue()
  1992  struct crypto_async_request *async_req, *backlog;  in s5p_tasklet_cb() local
  1998  async_req = crypto_dequeue_request(&dev->queue);  in s5p_tasklet_cb()
  2000  if (!async_req) {  in s5p_tasklet_cb()
  2010  dev->req = ablkcipher_request_cast(async_req);  in s5p_tasklet_cb()
|
D | img-hash.c |
   500  struct crypto_async_request *async_req, *backlog;  in img_hash_handle_queue() local
   516  async_req = crypto_dequeue_request(&hdev->queue);  in img_hash_handle_queue()
   517  if (async_req)  in img_hash_handle_queue()
   522  if (!async_req)  in img_hash_handle_queue()
   528  req = ahash_request_cast(async_req);  in img_hash_handle_queue()
|
D | atmel-tdes.c |
   591  struct crypto_async_request *async_req, *backlog;  in atmel_tdes_handle_queue() local
   605  async_req = crypto_dequeue_request(&dd->queue);  in atmel_tdes_handle_queue()
   606  if (async_req)  in atmel_tdes_handle_queue()
   610  if (!async_req)  in atmel_tdes_handle_queue()
   616  req = ablkcipher_request_cast(async_req);  in atmel_tdes_handle_queue()
|
/Linux-v4.19/drivers/net/usb/ |
D | rtl8150.c |
   144  struct async_req {  struct
   192  struct async_req *req = (struct async_req *)urb->context;  in async_set_reg_cb()
   205  struct async_req *req;  in async_set_registers()
   207  req = kmalloc(sizeof(struct async_req), GFP_ATOMIC);  in async_set_registers()
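Note this hit is unrelated to the crypto API: in rtl8150 `struct async_req` is the driver's own container for a fire-and-forget USB register write, carried in urb->context and freed from the completion handler. A self-contained sketch of that pattern; the field layout, the vendor request value 0x05, and the function bodies beyond the listing are assumptions, not the driver's actual definitions.

```c
#include <linux/slab.h>
#include <linux/usb.h>

struct async_req {			/* hypothetical layout */
	struct usb_ctrlrequest dr;	/* setup packet must outlive submission */
	__le16 value;			/* register value being written */
};

static void async_set_reg_cb(struct urb *urb)
{
	struct async_req *req = (struct async_req *)urb->context;

	/* the write finished (or failed): release request and urb */
	kfree(req);
	usb_free_urb(urb);
}

static int async_set_register(struct usb_device *udev, u16 reg, u16 value)
{
	struct async_req *req;
	struct urb *urb;
	int ret;

	req = kmalloc(sizeof(struct async_req), GFP_ATOMIC);
	if (!req)
		return -ENOMEM;
	urb = usb_alloc_urb(0, GFP_ATOMIC);
	if (!urb) {
		kfree(req);
		return -ENOMEM;
	}

	req->value = cpu_to_le16(value);
	req->dr.bRequestType = USB_DIR_OUT | USB_TYPE_VENDOR | USB_RECIP_DEVICE;
	req->dr.bRequest = 0x05;	/* vendor "set registers", assumed */
	req->dr.wValue = cpu_to_le16(reg);
	req->dr.wIndex = 0;
	req->dr.wLength = cpu_to_le16(sizeof(req->value));

	usb_fill_control_urb(urb, udev, usb_sndctrlpipe(udev, 0),
			     (unsigned char *)&req->dr, &req->value,
			     sizeof(req->value), async_set_reg_cb, req);
	ret = usb_submit_urb(urb, GFP_ATOMIC);
	if (ret) {
		kfree(req);
		usb_free_urb(urb);
	}
	return ret;
}
```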
|
/Linux-v4.19/drivers/crypto/mediatek/ |
D | mtk-sha.c |
   658  struct crypto_async_request *async_req, *backlog;  in mtk_sha_handle_queue() local
   673  async_req = crypto_dequeue_request(&sha->queue);  in mtk_sha_handle_queue()
   674  if (async_req)  in mtk_sha_handle_queue()
   678  if (!async_req)  in mtk_sha_handle_queue()
   684  req = ahash_request_cast(async_req);  in mtk_sha_handle_queue()
|
/Linux-v4.19/drivers/crypto/amcc/ |
D | crypto4xx_core.h |
    78  struct crypto_async_request *async_req;  /* base crypto request  member
|
D | crypto4xx_core.c |
   538  req = skcipher_request_cast(pd_uinfo->async_req);  in crypto4xx_cipher_done()
   570  ahash_req = ahash_request_cast(pd_uinfo->async_req);  in crypto4xx_ahash_done()
   586  struct aead_request *aead_req = container_of(pd_uinfo->async_req,  in crypto4xx_aead_done()
   651  switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) {  in crypto4xx_pd_done()
   808  pd_uinfo->async_req = req;  in crypto4xx_build_pd()
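crypto4xx stores the base pointer per hardware descriptor (pd_uinfo) and recovers the concrete request in the done path: a *_request_cast() helper where one exists, container_of() on the .base member for AEAD, and a crypto_tfm_alg_type() switch when one descriptor queue serves several algorithm types. Sketched below; the my_* names and the pd_uinfo layout are assumptions:

```c
#include <crypto/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/skcipher.h>

struct my_pd_uinfo {				/* cf. crypto4xx_core.h line 78 */
	struct crypto_async_request *async_req;	/* base crypto request */
};

static void my_cipher_done(struct skcipher_request *req);	/* assumed */
static void my_ahash_done(struct ahash_request *req);		/* assumed */
static void my_aead_done(struct aead_request *req);		/* assumed */

static void my_pd_done(struct my_pd_uinfo *pd_uinfo)
{
	switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) {
	case CRYPTO_ALG_TYPE_AEAD:
		/* no aead cast helper is used above; container_of on .base */
		my_aead_done(container_of(pd_uinfo->async_req,
					  struct aead_request, base));
		break;
	case CRYPTO_ALG_TYPE_AHASH:
		my_ahash_done(ahash_request_cast(pd_uinfo->async_req));
		break;
	default:
		my_cipher_done(skcipher_request_cast(pd_uinfo->async_req));
		break;
	}
}
```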
|