/Linux-v6.6/drivers/crypto/qce/

core.c
   66  static int qce_handle_request(struct crypto_async_request *async_req)  in qce_handle_request()  argument
   70  	u32 type = crypto_tfm_alg_type(async_req->tfm);  in qce_handle_request()
   76  		ret = ops->async_req_handle(async_req);  in qce_handle_request()
   86  	struct crypto_async_request *async_req, *backlog;  in qce_handle_queue()  local
  102  	async_req = crypto_dequeue_request(&qce->queue);  in qce_handle_queue()
  103  	if (async_req)  in qce_handle_queue()
  104  		qce->req = async_req;  in qce_handle_queue()
  108  	if (!async_req)  in qce_handle_queue()
  117  	err = qce_handle_request(async_req);  in qce_handle_queue()

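The core.c hits show the queue-handling pattern that recurs throughout this listing (s5p-sss.c, img-hash.c, atmel-tdes.c and hifn_795x.c below): enqueue under a lock, dequeue one request plus the backlog marker, complete the backlogged request with -EINPROGRESS, then dispatch. A minimal sketch of that pattern, assuming hypothetical my_dev/my_handle_request stand-ins for the driver-specific parts (locking around dev->req is simplified):

#include <linux/spinlock.h>
#include <crypto/algapi.h>

struct my_dev {					/* hypothetical device state */
	spinlock_t lock;
	struct crypto_queue queue;
	struct crypto_async_request *req;	/* request currently in flight */
};

/* Hypothetical dispatch to the hardware; stubbed for this sketch. */
static int my_handle_request(struct my_dev *dev,
			     struct crypto_async_request *async_req)
{
	return -ENOSYS;
}

/* Peek at the backlog marker; several drivers define this helper locally. */
static struct crypto_async_request *my_get_backlog(struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static int my_handle_queue(struct my_dev *dev, struct crypto_async_request *req)
{
	struct crypto_async_request *async_req, *backlog;
	unsigned long flags;
	int ret = 0, err;

	spin_lock_irqsave(&dev->lock, flags);

	if (req)
		ret = crypto_enqueue_request(&dev->queue, req);

	/* Only one request is in flight at a time. */
	if (dev->req) {
		spin_unlock_irqrestore(&dev->lock, flags);
		return ret;
	}

	backlog = my_get_backlog(&dev->queue);
	async_req = crypto_dequeue_request(&dev->queue);
	if (async_req)
		dev->req = async_req;

	spin_unlock_irqrestore(&dev->lock, flags);

	if (!async_req)
		return ret;

	if (backlog)
		/* The backlogged request is now genuinely queued. */
		crypto_request_complete(backlog, -EINPROGRESS);

	err = my_handle_request(dev, async_req);
	if (err) {
		dev->req = NULL;
		crypto_request_complete(async_req, err);
	}

	return ret;
}
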
common.c
  147  static int qce_setup_regs_ahash(struct crypto_async_request *async_req)  in qce_setup_regs_ahash()  argument
  149  	struct ahash_request *req = ahash_request_cast(async_req);  in qce_setup_regs_ahash()
  150  	struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm);  in qce_setup_regs_ahash()
  152  	struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);  in qce_setup_regs_ahash()
  155  	unsigned int blocksize = crypto_tfm_alg_blocksize(async_req->tfm);  in qce_setup_regs_ahash()
  314  static int qce_setup_regs_skcipher(struct crypto_async_request *async_req)  in qce_setup_regs_skcipher()  argument
  316  	struct skcipher_request *req = skcipher_request_cast(async_req);  in qce_setup_regs_skcipher()
  318  	struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm);  in qce_setup_regs_skcipher()
  420  static int qce_setup_regs_aead(struct crypto_async_request *async_req)  in qce_setup_regs_aead()  argument
  422  	struct aead_request *req = aead_request_cast(async_req);  in qce_setup_regs_aead()
  [all …]

sha.c
   38  	struct crypto_async_request *async_req = data;  in qce_ahash_done()  local
   39  	struct ahash_request *req = ahash_request_cast(async_req);  in qce_ahash_done()
   42  	struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);  in qce_ahash_done()
   75  static int qce_ahash_async_req_handle(struct crypto_async_request *async_req)  in qce_ahash_async_req_handle()  argument
   77  	struct ahash_request *req = ahash_request_cast(async_req);  in qce_ahash_async_req_handle()
   79  	struct qce_sha_ctx *ctx = crypto_tfm_ctx(async_req->tfm);  in qce_ahash_async_req_handle()
   80  	struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);  in qce_ahash_async_req_handle()
  112  			       &rctx->result_sg, 1, qce_ahash_done, async_req);  in qce_ahash_async_req_handle()
  118  	ret = qce_start(async_req, tmpl->crypto_alg_type);  in qce_ahash_async_req_handle()

skcipher.c
   29  	struct crypto_async_request *async_req = data;  in qce_skcipher_done()  local
   30  	struct skcipher_request *req = skcipher_request_cast(async_req);  in qce_skcipher_done()
   64  qce_skcipher_async_req_handle(struct crypto_async_request *async_req)  in qce_skcipher_async_req_handle()  argument
   66  	struct skcipher_request *req = skcipher_request_cast(async_req);  in qce_skcipher_async_req_handle()
  146  			       qce_skcipher_done, async_req);  in qce_skcipher_async_req_handle()
  152  	ret = qce_start(async_req, tmpl->crypto_alg_type);  in qce_skcipher_async_req_handle()

aead.c
   25  	struct crypto_async_request *async_req = data;  in qce_aead_done()  local
   26  	struct aead_request *req = aead_request_cast(async_req);  in qce_aead_done()
   28  	struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);  in qce_aead_done()
  408  qce_aead_async_req_handle(struct crypto_async_request *async_req)  in qce_aead_async_req_handle()  argument
  410  	struct aead_request *req = aead_request_cast(async_req);  in qce_aead_async_req_handle()
  413  	struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);  in qce_aead_async_req_handle()
  472  			       qce_aead_done, async_req);  in qce_aead_async_req_handle()
  478  	ret = qce_start(async_req, tmpl->crypto_alg_type);  in qce_aead_async_req_handle()

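qce_ahash_done(), qce_skcipher_done() and qce_aead_done() share one completion shape: the opaque callback cookie is the crypto_async_request, which is cast back to the concrete request type before the result is reported. A minimal sketch for the skcipher case (my_* names are hypothetical):

#include <linux/printk.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>

static void my_skcipher_done(void *data)
{
	struct crypto_async_request *async_req = data;
	struct skcipher_request *req = skcipher_request_cast(async_req);
	int error = 0;

	/* ... unmap DMA buffers for req, read status registers into error ... */
	pr_debug("%s: completing request %p\n", __func__, req);

	/* Report the result; the queue handler then starts the next request. */
	crypto_request_complete(async_req, error);
}
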
core.h
   58  	int (*async_req_handle)(struct crypto_async_request *async_req);

common.h
  102  int qce_start(struct crypto_async_request *async_req, u32 type);

/Linux-v6.6/crypto/

crypto_engine.c
   76  	struct crypto_async_request *async_req, *backlog;  in crypto_pump_requests()  local
  123  	async_req = crypto_dequeue_request(&engine->queue);  in crypto_pump_requests()
  124  	if (!async_req)  in crypto_pump_requests()
  133  		engine->cur_req = async_req;  in crypto_pump_requests()
  151  	if (async_req->tfm->__crt_alg->cra_flags & CRYPTO_ALG_ENGINE) {  in crypto_pump_requests()
  152  		alg = container_of(async_req->tfm->__crt_alg,  in crypto_pump_requests()
  161  	ret = op->do_one_request(engine, async_req);  in crypto_pump_requests()
  183  		crypto_enqueue_request_head(&engine->queue, async_req);  in crypto_pump_requests()
  192  	crypto_request_complete(async_req, ret);  in crypto_pump_requests()

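crypto_pump_requests() is the generic counterpart of the driver-local queue handlers above. Since the CRYPTO_ALG_ENGINE rework (visible at line 151), the engine finds the driver's do_one_request() through the *_engine_alg wrapper set up at registration. A sketch of the driver side, assuming hypothetical my_* names and eliding the skcipher callbacks:

#include <linux/module.h>
#include <crypto/aes.h>
#include <crypto/engine.h>
#include <crypto/internal/skcipher.h>

static int my_do_one_request(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req =
		container_of(areq, struct skcipher_request, base);

	/* Program the hardware for req here; returning 0 means "accepted". */
	pr_debug("my-cbc-aes: submitted %p\n", req);

	/*
	 * On completion (typically in the IRQ handler) the driver calls
	 * crypto_finalize_skcipher_request(engine, req, err), which lets
	 * the engine pump the next queued request.
	 */
	return 0;
}

static struct skcipher_engine_alg my_alg = {
	.base.base = {
		.cra_name	 = "cbc(aes)",
		.cra_driver_name = "my-cbc-aes",	/* hypothetical */
		.cra_flags	 = CRYPTO_ALG_ASYNC,
		.cra_blocksize	 = AES_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	},
	/* .base.setkey/.encrypt/.decrypt and keysize limits omitted here */
	.op.do_one_request = my_do_one_request,
};

/*
 * crypto_engine_register_skcipher(&my_alg) sets CRYPTO_ALG_ENGINE in
 * cra_flags, which is exactly what crypto_pump_requests() tests above.
 */
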
/Linux-v6.6/drivers/crypto/ccp/

ccp-crypto-aes.c
   22  static int ccp_aes_complete(struct crypto_async_request *async_req, int ret)  in ccp_aes_complete()  argument
   24  	struct skcipher_request *req = skcipher_request_cast(async_req);  in ccp_aes_complete()
  132  static int ccp_aes_rfc3686_complete(struct crypto_async_request *async_req,  in ccp_aes_rfc3686_complete()  argument
  135  	struct skcipher_request *req = skcipher_request_cast(async_req);  in ccp_aes_rfc3686_complete()
  141  	return ccp_aes_complete(async_req, ret);  in ccp_aes_rfc3686_complete()

ccp-crypto-des3.c
   21  static int ccp_des3_complete(struct crypto_async_request *async_req, int ret)  in ccp_des3_complete()  argument
   23  	struct skcipher_request *req = skcipher_request_cast(async_req);  in ccp_des3_complete()

ccp-crypto-aes-xts.c
   62  static int ccp_aes_xts_complete(struct crypto_async_request *async_req, int ret)  in ccp_aes_xts_complete()  argument
   64  	struct skcipher_request *req = skcipher_request_cast(async_req);  in ccp_aes_xts_complete()

ccp-crypto-rsa.c
   44  static int ccp_rsa_complete(struct crypto_async_request *async_req, int ret)  in ccp_rsa_complete()  argument
   46  	struct akcipher_request *req = akcipher_request_cast(async_req);  in ccp_rsa_complete()

ccp-crypto-aes-cmac.c
   23  static int ccp_aes_cmac_complete(struct crypto_async_request *async_req,  in ccp_aes_cmac_complete()  argument
   26  	struct ahash_request *req = ahash_request_cast(async_req);  in ccp_aes_cmac_complete()

ccp-crypto-sha.c
   27  static int ccp_sha_complete(struct crypto_async_request *async_req, int ret)  in ccp_sha_complete()  argument
   29  	struct ahash_request *req = ahash_request_cast(async_req);  in ccp_sha_complete()

ccp-crypto-aes-galois.c
   24  static int ccp_aes_gcm_complete(struct crypto_async_request *async_req, int ret)  in ccp_aes_gcm_complete()  argument

/Linux-v6.6/drivers/crypto/allwinner/sun8i-ce/

sun8i-ce-cipher.c
  120  static int sun8i_ce_cipher_prepare(struct crypto_engine *engine, void *async_req)  in sun8i_ce_cipher_prepare()  argument
  122  	struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);  in sun8i_ce_cipher_prepare()
  319  				      void *async_req)  in sun8i_ce_cipher_unprepare()  argument
  321  	struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);  in sun8i_ce_cipher_unprepare()

/Linux-v6.6/drivers/net/usb/

rtl8150.c
  141  struct async_req {  struct
  169  	struct async_req *req = (struct async_req *)urb->context;  in async_set_reg_cb()
  182  	struct async_req *req;  in async_set_registers()
  184  	req = kmalloc(sizeof(struct async_req), GFP_ATOMIC);  in async_set_registers()

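Unlike every other hit in this listing, rtl8150.c's async_req is its own private struct: it bundles a USB setup packet with a small payload so register writes can be issued from atomic context, and the completion callback frees everything. A condensed sketch of the idea; the struct layout and vendor request number are illustrative, not the driver's exact ones:

#include <linux/usb.h>
#include <linux/slab.h>

struct my_async_req {			/* illustrative layout */
	struct urb *urb;
	struct usb_ctrlrequest dr;	/* setup packet must outlive submission */
	__le16 value;			/* data-stage payload */
};

static void my_async_cb(struct urb *urb)
{
	struct my_async_req *req = urb->context;

	if (urb->status)
		dev_dbg(&urb->dev->dev, "async write failed: %d\n", urb->status);
	kfree(req);			/* everything was allocated per request */
	usb_free_urb(urb);
}

static int my_async_write_reg(struct usb_device *udev, u16 indx, u16 reg)
{
	struct my_async_req *req;
	int ret;

	req = kmalloc(sizeof(*req), GFP_ATOMIC);	/* may run in softirq */
	if (!req)
		return -ENOMEM;
	req->urb = usb_alloc_urb(0, GFP_ATOMIC);
	if (!req->urb) {
		kfree(req);
		return -ENOMEM;
	}
	req->value = cpu_to_le16(reg);
	req->dr.bRequestType = USB_DIR_OUT | USB_TYPE_VENDOR | USB_RECIP_DEVICE;
	req->dr.bRequest = 0x05;			/* hypothetical vendor request */
	req->dr.wValue = cpu_to_le16(indx);
	req->dr.wIndex = 0;
	req->dr.wLength = cpu_to_le16(sizeof(req->value));

	usb_fill_control_urb(req->urb, udev, usb_sndctrlpipe(udev, 0),
			     (unsigned char *)&req->dr, &req->value,
			     sizeof(req->value), my_async_cb, req);
	ret = usb_submit_urb(req->urb, GFP_ATOMIC);
	if (ret) {
		usb_free_urb(req->urb);
		kfree(req);
	}
	return ret;
}
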
/Linux-v6.6/drivers/crypto/

s5p-sss.c
 1373  	struct crypto_async_request *async_req, *backlog;  in s5p_hash_handle_queue()  local
 1389  	async_req = crypto_dequeue_request(&dd->hash_queue);  in s5p_hash_handle_queue()
 1390  	if (async_req)  in s5p_hash_handle_queue()
 1395  	if (!async_req)  in s5p_hash_handle_queue()
 1401  	req = ahash_request_cast(async_req);  in s5p_hash_handle_queue()
 1977  	struct crypto_async_request *async_req, *backlog;  in s5p_tasklet_cb()
 1983  	async_req = crypto_dequeue_request(&dev->queue);  in s5p_tasklet_cb()
 1985  	if (!async_req) {  in s5p_tasklet_cb()
 1995  	dev->req = skcipher_request_cast(async_req);  in s5p_tasklet_cb()

sahara.c
 1038  	struct crypto_async_request *async_req;  in sahara_queue_manage()  local
 1047  		async_req = crypto_dequeue_request(&dev->queue);  in sahara_queue_manage()
 1053  		if (async_req) {  in sahara_queue_manage()
 1054  			if (crypto_tfm_alg_type(async_req->tfm) ==  in sahara_queue_manage()
 1057  					ahash_request_cast(async_req);  in sahara_queue_manage()
 1062  					skcipher_request_cast(async_req);  in sahara_queue_manage()
 1067  			crypto_request_complete(async_req, ret);  in sahara_queue_manage()

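sahara_queue_manage() (like crypto4xx_pd_done() further down) runs hash and cipher requests through a single queue and recovers the concrete request type from the tfm. A minimal sketch of that dispatch, with hypothetical my_handle_* workers stubbed out:

#include <linux/crypto.h>
#include <crypto/hash.h>
#include <crypto/internal/skcipher.h>

/* Hypothetical per-type workers; real drivers program hardware here. */
static int my_handle_ahash(struct ahash_request *req)
{
	return -ENOSYS;	/* stub */
}

static int my_handle_skcipher(struct skcipher_request *req)
{
	return -ENOSYS;	/* stub */
}

static void my_dispatch(struct crypto_async_request *async_req)
{
	int ret;

	switch (crypto_tfm_alg_type(async_req->tfm)) {
	case CRYPTO_ALG_TYPE_AHASH:
		ret = my_handle_ahash(ahash_request_cast(async_req));
		break;
	case CRYPTO_ALG_TYPE_SKCIPHER:
		ret = my_handle_skcipher(skcipher_request_cast(async_req));
		break;
	default:
		ret = -EINVAL;
		break;
	}

	/* Hand the result back to whoever submitted the request. */
	crypto_request_complete(async_req, ret);
}
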
img-hash.c
  503  	struct crypto_async_request *async_req, *backlog;  in img_hash_handle_queue()  local
  519  	async_req = crypto_dequeue_request(&hdev->queue);  in img_hash_handle_queue()
  520  	if (async_req)  in img_hash_handle_queue()
  525  	if (!async_req)  in img_hash_handle_queue()
  531  	req = ahash_request_cast(async_req);  in img_hash_handle_queue()

atmel-tdes.c
  594  	struct crypto_async_request *async_req, *backlog;  in atmel_tdes_handle_queue()  local
  608  	async_req = crypto_dequeue_request(&dd->queue);  in atmel_tdes_handle_queue()
  609  	if (async_req)  in atmel_tdes_handle_queue()
  613  	if (!async_req)  in atmel_tdes_handle_queue()
  619  	req = skcipher_request_cast(async_req);  in atmel_tdes_handle_queue()

hifn_795x.c
 1915  	struct crypto_async_request *async_req;  in hifn_flush()  local
 1931  	while ((async_req = crypto_dequeue_request(&dev->queue))) {  in hifn_flush()
 1932  		req = skcipher_request_cast(async_req);  in hifn_flush()
 2042  	struct crypto_async_request *async_req, *backlog;  in hifn_process_queue()  local
 2050  		async_req = crypto_dequeue_request(&dev->queue);  in hifn_process_queue()
 2053  		if (!async_req)  in hifn_process_queue()
 2059  		req = skcipher_request_cast(async_req);  in hifn_process_queue()

/Linux-v6.6/drivers/nvme/host/

tcp.c
  171  	struct nvme_tcp_request async_req;  member
  232  	return req == &req->queue->ctrl->async_req;  in nvme_tcp_async_req()
 1300  	struct nvme_tcp_request *async = &ctrl->async_req;  in nvme_tcp_free_async_req()
 1308  	struct nvme_tcp_request *async = &ctrl->async_req;  in nvme_tcp_alloc_async_req()
 1728  	if (to_tcp_ctrl(ctrl)->async_req.pdu) {  in nvme_tcp_free_admin_queue()
 1731  		to_tcp_ctrl(ctrl)->async_req.pdu = NULL;  in nvme_tcp_free_admin_queue()
 2231  	struct nvme_tcp_cmd_pdu *pdu = ctrl->async_req.pdu;  in nvme_tcp_submit_async_event()
 2247  	ctrl->async_req.state = NVME_TCP_SEND_CMD_PDU;  in nvme_tcp_submit_async_event()
 2248  	ctrl->async_req.offset = 0;  in nvme_tcp_submit_async_event()
 2249  	ctrl->async_req.curr_bio = NULL;  in nvme_tcp_submit_async_event()
  [all …]

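Here async_req is not a crypto request at all: nvme-tcp embeds one reserved request in the controller for Asynchronous Event Reporting, so nvme_tcp_async_req() can recognize it by address rather than by tag. A sketch of that pointer-identity idiom, with hypothetical types:

#include <linux/types.h>

struct my_ctrl;

struct my_request {
	struct my_ctrl *ctrl;		/* back-pointer, set at init time */
	/* ... per-command state ... */
};

struct my_ctrl {
	/* Reserved request, embedded rather than allocated from the tagset;
	 * init code must set ctrl->async_req.ctrl = ctrl. */
	struct my_request async_req;
};

/* True only for the one embedded async-event request. */
static inline bool my_is_async(struct my_request *req)
{
	return req == &req->ctrl->async_req;
}
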
/Linux-v6.6/drivers/crypto/amcc/

crypto4xx_core.c
  526  	req = skcipher_request_cast(pd_uinfo->async_req);  in crypto4xx_cipher_done()
  558  	ahash_req = ahash_request_cast(pd_uinfo->async_req);  in crypto4xx_ahash_done()
  573  	struct aead_request *aead_req = container_of(pd_uinfo->async_req,  in crypto4xx_aead_done()
  638  	switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) {  in crypto4xx_pd_done()
  814  	pd_uinfo->async_req = req;  in crypto4xx_build_pd()

crypto4xx_core.h
   71  	struct crypto_async_request *async_req; 	/* base crypto request  member