
Searched refs:areq (Results 1 – 25 of 48) sorted by relevance


/Linux-v4.19/drivers/crypto/sunxi-ss/
sun4i-ss-cipher.c
19 static int sun4i_ss_opti_poll(struct skcipher_request *areq) in sun4i_ss_opti_poll() argument
21 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq); in sun4i_ss_opti_poll()
25 struct sun4i_cipher_req_ctx *ctx = skcipher_request_ctx(areq); in sun4i_ss_opti_poll()
34 unsigned int ileft = areq->cryptlen; in sun4i_ss_opti_poll()
35 unsigned int oleft = areq->cryptlen; in sun4i_ss_opti_poll()
41 if (!areq->cryptlen) in sun4i_ss_opti_poll()
44 if (!areq->iv) { in sun4i_ss_opti_poll()
49 if (!areq->src || !areq->dst) { in sun4i_ss_opti_poll()
59 if (areq->iv) { in sun4i_ss_opti_poll()
61 v = *(u32 *)(areq->iv + i * 4); in sun4i_ss_opti_poll()
[all …]
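The sun4i-ss hits above show the usual shape of a hardware skcipher handler: everything it needs comes off the request pointer. Below is a minimal sketch of that pattern, not taken from the driver; example_skcipher_run, example_cipher_req_ctx and EXAMPLE_OP_DECRYPT are invented names, and it assumes the v4.19 <crypto/internal/skcipher.h> API.

#include <crypto/internal/skcipher.h>
#include <linux/errno.h>

#define EXAMPLE_OP_DECRYPT	0x1

/* per-request state handed back by skcipher_request_ctx() */
struct example_cipher_req_ctx {
	u32 mode;
};

static int example_skcipher_run(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct example_cipher_req_ctx *ctx = skcipher_request_ctx(areq);
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);

	if (!areq->cryptlen)
		return 0;			/* nothing to process */

	if (!areq->src || !areq->dst)
		return -EINVAL;			/* need both scatterlists */

	if (areq->iv && ivsize) {
		/* program the IV registers from areq->iv (ivsize bytes) */
	}

	if (ctx->mode & EXAMPLE_OP_DECRYPT) {
		/* select the decryption key schedule */
	}

	/* ... feed areq->src through the engine and drain into areq->dst ... */
	return 0;
}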
sun4i-ss-hash.c
38 int sun4i_hash_init(struct ahash_request *areq) in sun4i_hash_init() argument
40 struct sun4i_req_ctx *op = ahash_request_ctx(areq); in sun4i_hash_init()
41 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); in sun4i_hash_init()
53 int sun4i_hash_export_md5(struct ahash_request *areq, void *out) in sun4i_hash_export_md5() argument
55 struct sun4i_req_ctx *op = ahash_request_ctx(areq); in sun4i_hash_export_md5()
76 int sun4i_hash_import_md5(struct ahash_request *areq, const void *in) in sun4i_hash_import_md5() argument
78 struct sun4i_req_ctx *op = ahash_request_ctx(areq); in sun4i_hash_import_md5()
82 sun4i_hash_init(areq); in sun4i_hash_import_md5()
95 int sun4i_hash_export_sha1(struct ahash_request *areq, void *out) in sun4i_hash_export_sha1() argument
97 struct sun4i_req_ctx *op = ahash_request_ctx(areq); in sun4i_hash_export_sha1()
[all …]
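The sun4i hash hits follow the ahash request-context idiom: the running digest state lives in the memory returned by ahash_request_ctx(), and export/import simply snapshot and restore it. A hedged sketch of that trio, with an invented example_req_ctx layout (the real struct sun4i_req_ctx differs), assuming v4.19 <crypto/internal/hash.h>:

#include <crypto/internal/hash.h>
#include <linux/string.h>
#include <linux/types.h>

struct example_req_ctx {
	u32 hash[5];		/* running digest words */
	u64 byte_count;		/* total bytes hashed so far */
};

static int example_hash_init(struct ahash_request *areq)
{
	struct example_req_ctx *op = ahash_request_ctx(areq);

	memset(op, 0, sizeof(*op));
	return 0;
}

static int example_hash_export(struct ahash_request *areq, void *out)
{
	struct example_req_ctx *op = ahash_request_ctx(areq);

	memcpy(out, op, sizeof(*op));	/* snapshot the partial state */
	return 0;
}

static int example_hash_import(struct ahash_request *areq, const void *in)
{
	struct example_req_ctx *op = ahash_request_ctx(areq);

	example_hash_init(areq);	/* reset, then restore the snapshot */
	memcpy(op, in, sizeof(*op));
	return 0;
}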
sun4i-ss.h
180 int sun4i_hash_init(struct ahash_request *areq);
181 int sun4i_hash_update(struct ahash_request *areq);
182 int sun4i_hash_final(struct ahash_request *areq);
183 int sun4i_hash_finup(struct ahash_request *areq);
184 int sun4i_hash_digest(struct ahash_request *areq);
185 int sun4i_hash_export_md5(struct ahash_request *areq, void *out);
186 int sun4i_hash_import_md5(struct ahash_request *areq, const void *in);
187 int sun4i_hash_export_sha1(struct ahash_request *areq, void *out);
188 int sun4i_hash_import_sha1(struct ahash_request *areq, const void *in);
190 int sun4i_ss_cbc_aes_encrypt(struct skcipher_request *areq);
[all …]
/Linux-v4.19/drivers/crypto/inside-secure/
safexcel_hash.c
153 struct ahash_request *areq = ahash_request_cast(async); in safexcel_handle_req_result() local
154 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq); in safexcel_handle_req_result()
155 struct safexcel_ahash_req *sreq = ahash_request_ctx(areq); in safexcel_handle_req_result()
172 dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE); in safexcel_handle_req_result()
189 memcpy(areq->result, sreq->state, in safexcel_handle_req_result()
204 struct ahash_request *areq = ahash_request_cast(async); in safexcel_ahash_send_req() local
205 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq); in safexcel_ahash_send_req()
206 struct safexcel_ahash_req *req = ahash_request_ctx(areq); in safexcel_ahash_send_req()
207 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); in safexcel_ahash_send_req()
219 cache_len = queued - areq->nbytes; in safexcel_ahash_send_req()
[all …]
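In the safexcel result handler, the ring gives back only a bare crypto_async_request; ahash_request_cast() recovers the ahash_request, and from there the transform and the per-request context. A sketch of that unwind step (invented example_ahash_req layout; assumes v4.19 <crypto/internal/hash.h> and the DMA API):

#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <linux/string.h>

struct example_ahash_req {
	int nents;		/* source sg entries that were DMA-mapped */
	u8 state[64];		/* intermediate digest written by the HW  */
};

static void example_handle_result(struct device *dev,
				  struct crypto_async_request *async, int err)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct example_ahash_req *sreq = ahash_request_ctx(areq);

	if (sreq->nents)
		dma_unmap_sg(dev, areq->src, sreq->nents, DMA_TO_DEVICE);

	if (!err)
		memcpy(areq->result, sreq->state,
		       crypto_ahash_digestsize(ahash));

	areq->base.complete(&areq->base, err);	/* notify the submitter */
}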
/Linux-v4.19/crypto/
algif_skcipher.c
64 struct af_alg_async_req *areq; in _skcipher_recvmsg() local
75 areq = af_alg_alloc_areq(sk, sizeof(struct af_alg_async_req) + in _skcipher_recvmsg()
77 if (IS_ERR(areq)) in _skcipher_recvmsg()
78 return PTR_ERR(areq); in _skcipher_recvmsg()
81 err = af_alg_get_rsgl(sk, msg, flags, areq, -1, &len); in _skcipher_recvmsg()
100 areq->tsgl_entries = af_alg_count_tsgl(sk, len, 0); in _skcipher_recvmsg()
101 if (!areq->tsgl_entries) in _skcipher_recvmsg()
102 areq->tsgl_entries = 1; in _skcipher_recvmsg()
103 areq->tsgl = sock_kmalloc(sk, array_size(sizeof(*areq->tsgl), in _skcipher_recvmsg()
104 areq->tsgl_entries), in _skcipher_recvmsg()
[all …]
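In algif_skcipher, areq is the AF_ALG bookkeeping object for one recvmsg(): af_alg_alloc_areq() allocates it with the skcipher_request appended, and the receive/transmit scatterlists are attached afterwards. A rough sketch of just the allocation step (assuming the v4.19 <crypto/if_alg.h> helpers; not a complete recvmsg path):

#include <crypto/if_alg.h>
#include <crypto/skcipher.h>
#include <linux/err.h>

static int example_alloc_areq(struct sock *sk, struct crypto_skcipher *tfm,
			      struct af_alg_async_req **out)
{
	struct af_alg_async_req *areq;

	/* room for the generic areq plus the tfm-specific request */
	areq = af_alg_alloc_areq(sk, sizeof(struct af_alg_async_req) +
				     crypto_skcipher_reqsize(tfm));
	if (IS_ERR(areq))
		return PTR_ERR(areq);

	*out = areq;
	return 0;
}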
algif_aead.c
104 struct af_alg_async_req *areq; in _aead_recvmsg() local
157 areq = af_alg_alloc_areq(sk, sizeof(struct af_alg_async_req) + in _aead_recvmsg()
159 if (IS_ERR(areq)) in _aead_recvmsg()
160 return PTR_ERR(areq); in _aead_recvmsg()
163 err = af_alg_get_rsgl(sk, msg, flags, areq, outlen, &usedpages); in _aead_recvmsg()
217 rsgl_src = areq->first_rsgl.sgl.sg; in _aead_recvmsg()
231 areq->first_rsgl.sgl.sg, processed); in _aead_recvmsg()
249 areq->first_rsgl.sgl.sg, outlen); in _aead_recvmsg()
254 areq->tsgl_entries = af_alg_count_tsgl(sk, processed, in _aead_recvmsg()
256 if (!areq->tsgl_entries) in _aead_recvmsg()
[all …]
af_alg.c
657 void af_alg_free_areq_sgls(struct af_alg_async_req *areq) in af_alg_free_areq_sgls() argument
659 struct sock *sk = areq->sk; in af_alg_free_areq_sgls()
667 list_for_each_entry_safe(rsgl, tmp, &areq->rsgl_list, list) { in af_alg_free_areq_sgls()
671 if (rsgl != &areq->first_rsgl) in af_alg_free_areq_sgls()
675 tsgl = areq->tsgl; in af_alg_free_areq_sgls()
677 for_each_sg(tsgl, sg, areq->tsgl_entries, i) { in af_alg_free_areq_sgls()
683 sock_kfree_s(sk, tsgl, areq->tsgl_entries * sizeof(*tsgl)); in af_alg_free_areq_sgls()
1028 void af_alg_free_resources(struct af_alg_async_req *areq) in af_alg_free_resources() argument
1030 struct sock *sk = areq->sk; in af_alg_free_resources()
1032 af_alg_free_areq_sgls(areq); in af_alg_free_resources()
[all …]
ahash.c
312 struct ahash_request *areq = req->data; in ahash_op_unaligned_done() local
315 ahash_notify_einprogress(areq); in ahash_op_unaligned_done()
329 ahash_restore_req(areq, err); in ahash_op_unaligned_done()
332 areq->base.complete(&areq->base, err); in ahash_op_unaligned_done()
390 struct ahash_request *areq = req->data; in ahash_def_finup_done2() local
395 ahash_restore_req(areq, err); in ahash_def_finup_done2()
397 areq->base.complete(&areq->base, err); in ahash_def_finup_done2()
418 struct ahash_request *areq = req->data; in ahash_def_finup_done1() local
421 ahash_notify_einprogress(areq); in ahash_def_finup_done1()
425 areq->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; in ahash_def_finup_done1()
[all …]
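The ahash.c hits show the generic completion convention: the inner request's ->data points back at the caller's ahash_request, so the done callback restores the original request and completes it. A sketch of that convention (example_restore_req stands in for ahash_restore_req(), which is private to crypto/ahash.c):

#include <crypto/internal/hash.h>
#include <linux/errno.h>

static void example_restore_req(struct ahash_request *areq, int err)
{
	/* free the temporary aligned buffer, put back the original ->result */
}

static void example_op_done(struct crypto_async_request *req, int err)
{
	struct ahash_request *areq = req->data;	/* the caller's request */

	if (err == -EINPROGRESS)
		return;				/* just a progress notification */

	example_restore_req(areq, err);
	areq->base.complete(&areq->base, err);	/* finish the original request */
}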
chacha20poly1305.c
120 static void chacha_decrypt_done(struct crypto_async_request *areq, int err) in chacha_decrypt_done() argument
122 async_done_continue(areq->data, err, poly_verify_tag); in chacha_decrypt_done()
170 static void poly_tail_done(struct crypto_async_request *areq, int err) in poly_tail_done() argument
172 async_done_continue(areq->data, err, poly_tail_continue); in poly_tail_done()
204 static void poly_cipherpad_done(struct crypto_async_request *areq, int err) in poly_cipherpad_done() argument
206 async_done_continue(areq->data, err, poly_tail); in poly_cipherpad_done()
234 static void poly_cipher_done(struct crypto_async_request *areq, int err) in poly_cipher_done() argument
236 async_done_continue(areq->data, err, poly_cipherpad); in poly_cipher_done()
265 static void poly_adpad_done(struct crypto_async_request *areq, int err) in poly_adpad_done() argument
267 async_done_continue(areq->data, err, poly_cipher); in poly_adpad_done()
[all …]
gcm.c
222 static void gcm_hash_len_done(struct crypto_async_request *areq, int err);
271 static void gcm_hash_len_done(struct crypto_async_request *areq, int err) in gcm_hash_len_done() argument
273 struct aead_request *req = areq->data; in gcm_hash_len_done()
292 static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq, in gcm_hash_crypt_remain_done() argument
295 struct aead_request *req = areq->data; in gcm_hash_crypt_remain_done()
323 static void gcm_hash_crypt_done(struct crypto_async_request *areq, int err) in gcm_hash_crypt_done() argument
325 struct aead_request *req = areq->data; in gcm_hash_crypt_done()
351 static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq, in gcm_hash_assoc_remain_done() argument
354 struct aead_request *req = areq->data; in gcm_hash_assoc_remain_done()
380 static void gcm_hash_assoc_done(struct crypto_async_request *areq, int err) in gcm_hash_assoc_done() argument
[all …]
cts.c
92 static void cts_cbc_crypt_done(struct crypto_async_request *areq, int err) in cts_cbc_crypt_done() argument
94 struct skcipher_request *req = areq->data; in cts_cbc_crypt_done()
132 static void crypto_cts_encrypt_done(struct crypto_async_request *areq, int err) in crypto_cts_encrypt_done() argument
134 struct skcipher_request *req = areq->data; in crypto_cts_encrypt_done()
224 static void crypto_cts_decrypt_done(struct crypto_async_request *areq, int err) in crypto_cts_decrypt_done() argument
226 struct skcipher_request *req = areq->data; in crypto_cts_decrypt_done()
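chacha20poly1305.c, gcm.c and cts.c all use the same callback convention for chained sub-requests: the crypto_async_request handed to the callback carries the outer request in ->data, and each *_done() either resumes the chain or completes the outer request. A generic sketch of one such step (invented name; assumes v4.19 <crypto/internal/skcipher.h>):

#include <crypto/internal/skcipher.h>

static void example_step_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;	/* the outer request */

	if (err)
		goto out;

	/* ... normally: kick off the next sub-request of the template ... */
out:
	skcipher_request_complete(req, err);
}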
/Linux-v4.19/drivers/crypto/
talitos.c
976 struct aead_request *areq) in ipsec_esp_unmap() argument
978 struct crypto_aead *aead = crypto_aead_reqtfm(areq); in ipsec_esp_unmap()
989 talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->cryptlen, in ipsec_esp_unmap()
990 areq->assoclen); in ipsec_esp_unmap()
999 sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize, in ipsec_esp_unmap()
1000 areq->assoclen + areq->cryptlen - ivsize); in ipsec_esp_unmap()
1013 struct aead_request *areq = context; in ipsec_esp_encrypt_done() local
1014 struct crypto_aead *authenc = crypto_aead_reqtfm(areq); in ipsec_esp_encrypt_done()
1023 ipsec_esp_unmap(dev, edesc, areq); in ipsec_esp_encrypt_done()
1028 icvdata = edesc->buf + areq->assoclen + areq->cryptlen; in ipsec_esp_encrypt_done()
[all …]
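The talitos unmap path derives all DMA and copy offsets from the aead_request itself: assoclen, cryptlen and the transform's ivsize. For CBC-style chaining it copies the last ciphertext block out of areq->dst as the next IV, which is what the sg_pcopy_to_buffer() hit above does. A hedged sketch of that one step (invented name):

#include <crypto/aead.h>
#include <linux/scatterlist.h>

static void example_save_next_iv(struct aead_request *areq, u8 *iv_out)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int dst_nents = sg_nents(areq->dst);

	/* the chained IV is the last ciphertext block in the destination */
	sg_pcopy_to_buffer(areq->dst, dst_nents, iv_out, ivsize,
			   areq->assoclen + areq->cryptlen - ivsize);
}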
picoxcell_crypto.c
323 static int spacc_aead_make_ddts(struct aead_request *areq) in spacc_aead_make_ddts() argument
325 struct crypto_aead *aead = crypto_aead_reqtfm(areq); in spacc_aead_make_ddts()
326 struct spacc_req *req = aead_request_ctx(areq); in spacc_aead_make_ddts()
334 total = areq->assoclen + areq->cryptlen; in spacc_aead_make_ddts()
338 src_nents = sg_nents_for_len(areq->src, total); in spacc_aead_make_ddts()
347 if (areq->src != areq->dst) { in spacc_aead_make_ddts()
348 dst_nents = sg_nents_for_len(areq->dst, total); in spacc_aead_make_ddts()
369 src_ents = dma_map_sg(engine->dev, areq->src, src_nents, in spacc_aead_make_ddts()
374 dst_ents = dma_map_sg(engine->dev, areq->dst, dst_nents, in spacc_aead_make_ddts()
378 dma_unmap_sg(engine->dev, areq->src, src_nents, in spacc_aead_make_ddts()
[all …]
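spacc_aead_make_ddts() sizes everything from areq: the scatterlist entry counts come from sg_nents_for_len() over assoclen + cryptlen, and src/dst are DMA-mapped separately unless the operation is in place. A simplified sketch of that mapping step (invented name; error handling reduced to the essentials):

#include <crypto/aead.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int example_map_aead(struct device *dev, struct aead_request *areq)
{
	unsigned int total = areq->assoclen + areq->cryptlen;
	int src_nents, dst_nents;

	src_nents = sg_nents_for_len(areq->src, total);
	if (src_nents < 0)
		return src_nents;

	if (areq->src == areq->dst) {
		/* in place: one bidirectional mapping covers both */
		if (!dma_map_sg(dev, areq->src, src_nents, DMA_BIDIRECTIONAL))
			return -ENOMEM;
		return 0;
	}

	dst_nents = sg_nents_for_len(areq->dst, total);
	if (dst_nents < 0)
		return dst_nents;

	if (!dma_map_sg(dev, areq->src, src_nents, DMA_TO_DEVICE))
		return -ENOMEM;

	if (!dma_map_sg(dev, areq->dst, dst_nents, DMA_FROM_DEVICE)) {
		dma_unmap_sg(dev, areq->src, src_nents, DMA_TO_DEVICE);
		return -ENOMEM;
	}

	return 0;
}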
atmel-ecc.c
109 void (*cbk)(struct atmel_ecc_work_data *work_data, void *areq,
111 void *areq; member
268 static void atmel_ecdh_done(struct atmel_ecc_work_data *work_data, void *areq, in atmel_ecdh_done() argument
271 struct kpp_request *req = areq; in atmel_ecdh_done()
355 work_data->cbk(work_data, work_data->areq, status); in atmel_ecc_work_handler()
360 void *areq, int status), in atmel_ecc_enqueue() argument
361 void *areq) in atmel_ecc_enqueue()
364 work_data->areq = areq; in atmel_ecc_enqueue()
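atmel-ecc keeps areq as an opaque void * in its work item, next to a callback that knows the real request type; the work handler just forwards (work_data, areq, status). A sketch of that indirection (invented names; assumes v4.19 <crypto/kpp.h>):

#include <crypto/kpp.h>
#include <linux/kernel.h>
#include <linux/workqueue.h>

struct example_work_data {
	struct work_struct work;
	void (*cbk)(struct example_work_data *wd, void *areq, int status);
	void *areq;		/* opaque request (here: a kpp_request) */
	int status;
};

static void example_ecdh_done(struct example_work_data *wd, void *areq,
			      int status)
{
	struct kpp_request *req = areq;		/* recover the real type */

	kpp_request_complete(req, status);
}

static void example_work_handler(struct work_struct *work)
{
	struct example_work_data *wd =
		container_of(work, struct example_work_data, work);

	wd->cbk(wd, wd->areq, wd->status);	/* hand the result back */
}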
atmel-aes.c
190 struct crypto_async_request *areq; member
508 ablkcipher_request_cast(dd->areq); in atmel_aes_complete()
528 dd->areq->complete(dd->areq, err); in atmel_aes_complete()
935 struct crypto_async_request *areq, *backlog; in atmel_aes_handle_queue() local
949 areq = crypto_dequeue_request(&dd->queue); in atmel_aes_handle_queue()
950 if (areq) in atmel_aes_handle_queue()
954 if (!areq) in atmel_aes_handle_queue()
960 ctx = crypto_tfm_ctx(areq->tfm); in atmel_aes_handle_queue()
962 dd->areq = areq; in atmel_aes_handle_queue()
964 start_async = (areq != new_areq); in atmel_aes_handle_queue()
[all …]
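atmel_aes_handle_queue() is the classic crypto_queue driver loop: enqueue the new request, pop the next one, poke any backlogged submitter with -EINPROGRESS, and remember dd->areq so the completion path can call its ->complete(). A pared-down sketch (invented names; locking omitted):

#include <crypto/algapi.h>
#include <linux/errno.h>

struct example_dev {
	struct crypto_queue queue;
	struct crypto_async_request *areq;	/* request being processed */
};

static int example_handle_queue(struct example_dev *dd,
				struct crypto_async_request *new_areq)
{
	struct crypto_async_request *areq, *backlog;
	int ret = 0;

	if (new_areq)
		ret = crypto_enqueue_request(&dd->queue, new_areq);

	backlog = crypto_get_backlog(&dd->queue);
	areq = crypto_dequeue_request(&dd->queue);
	if (!areq)
		return ret;			/* queue drained */

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	dd->areq = areq;
	/*
	 * ... start the hardware here; the completion path later calls
	 * dd->areq->complete(dd->areq, err) ...
	 */
	return ret;
}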
/Linux-v4.19/arch/x86/crypto/sha1-mb/
sha1_mb.c
83 struct ahash_request *areq; in cast_hash_to_mcryptd_ctx() local
85 areq = container_of((void *) hash_ctx, struct ahash_request, __ctx); in cast_hash_to_mcryptd_ctx()
86 return container_of(areq, struct mcryptd_hash_request_ctx, areq); in cast_hash_to_mcryptd_ctx()
96 struct ahash_request *areq) in req_ctx_init() argument
356 static int sha1_mb_init(struct ahash_request *areq) in sha1_mb_init() argument
358 struct sha1_hash_ctx *sctx = ahash_request_ctx(areq); in sha1_mb_init()
376 struct sha1_hash_ctx *sctx = ahash_request_ctx(&rctx->areq); in sha1_mb_set_results()
408 ahash_request_ctx(&rctx->areq); in sha_finish_walk()
500 static int sha1_mb_update(struct ahash_request *areq) in sha1_mb_update() argument
503 container_of(areq, struct mcryptd_hash_request_ctx, areq); in sha1_mb_update()
[all …]
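The sha*-mb code needs to get from the bare hash state the multibuffer manager tracks back to the mcryptd request context; since that state is embedded in the ahash_request's __ctx area, which is itself the ->areq member of the outer context, two container_of() steps do it. A sketch with invented struct names mirroring that layout:

#include <crypto/hash.h>
#include <linux/kernel.h>
#include <linux/types.h>

struct example_hash_ctx {
	u32 digest[5];		/* state tracked by the MB manager */
};

struct example_mcryptd_rctx {
	int flag;
	struct ahash_request areq;	/* example_hash_ctx lives in areq.__ctx */
};

static struct example_mcryptd_rctx *
example_cast_hash_to_rctx(struct example_hash_ctx *hash_ctx)
{
	struct ahash_request *areq;

	areq = container_of((void *)hash_ctx, struct ahash_request, __ctx);
	return container_of(areq, struct example_mcryptd_rctx, areq);
}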
/Linux-v4.19/arch/x86/crypto/sha256-mb/
sha256_mb.c
83 struct ahash_request *areq; in cast_hash_to_mcryptd_ctx() local
85 areq = container_of((void *) hash_ctx, struct ahash_request, __ctx); in cast_hash_to_mcryptd_ctx()
86 return container_of(areq, struct mcryptd_hash_request_ctx, areq); in cast_hash_to_mcryptd_ctx()
96 struct ahash_request *areq) in req_ctx_init() argument
352 static int sha256_mb_init(struct ahash_request *areq) in sha256_mb_init() argument
354 struct sha256_hash_ctx *sctx = ahash_request_ctx(areq); in sha256_mb_init()
375 struct sha256_hash_ctx *sctx = ahash_request_ctx(&rctx->areq); in sha256_mb_set_results()
407 ahash_request_ctx(&rctx->areq); in sha_finish_walk()
499 static int sha256_mb_update(struct ahash_request *areq) in sha256_mb_update() argument
502 container_of(areq, struct mcryptd_hash_request_ctx, areq); in sha256_mb_update()
[all …]
/Linux-v4.19/arch/x86/crypto/sha512-mb/
sha512_mb.c
83 struct ahash_request *areq; in cast_hash_to_mcryptd_ctx() local
85 areq = container_of((void *) hash_ctx, struct ahash_request, __ctx); in cast_hash_to_mcryptd_ctx()
86 return container_of(areq, struct mcryptd_hash_request_ctx, areq); in cast_hash_to_mcryptd_ctx()
96 struct ahash_request *areq) in req_ctx_init() argument
381 static int sha512_mb_init(struct ahash_request *areq) in sha512_mb_init() argument
383 struct sha512_hash_ctx *sctx = ahash_request_ctx(areq); in sha512_mb_init()
404 struct sha512_hash_ctx *sctx = ahash_request_ctx(&rctx->areq); in sha512_mb_set_results()
436 ahash_request_ctx(&rctx->areq); in sha_finish_walk()
530 static int sha512_mb_update(struct ahash_request *areq) in sha512_mb_update() argument
533 container_of(areq, struct mcryptd_hash_request_ctx, in sha512_mb_update()
[all …]
/Linux-v4.19/drivers/crypto/stm32/
stm32-cryp.c
142 struct aead_request *areq; member
448 return is_encrypt(cryp) ? cryp->areq->cryptlen : in stm32_cryp_get_input_text_len()
449 cryp->areq->cryptlen - cryp->authsize; in stm32_cryp_get_input_text_len()
458 memcpy(iv, cryp->areq->iv, 12); in stm32_cryp_gcm_init()
481 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE); in stm32_cryp_ccm_init()
491 if (cryp->areq->assoclen) in stm32_cryp_ccm_init()
588 if (cryp->areq->assoclen) { in stm32_cryp_hw_init()
649 crypto_finalize_aead_request(cryp->engine, cryp->areq, err); in stm32_cryp_finish_req()
650 cryp->areq = NULL; in stm32_cryp_finish_req()
670 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
[all …]
/Linux-v4.19/drivers/crypto/axis/
artpec6_crypto.c
363 static int artpec6_crypto_prepare_aead(struct aead_request *areq);
364 static int artpec6_crypto_prepare_crypto(struct skcipher_request *areq);
365 static int artpec6_crypto_prepare_hash(struct ahash_request *areq);
1313 static int artpec6_crypto_prepare_hash(struct ahash_request *areq) in artpec6_crypto_prepare_hash() argument
1315 struct artpec6_hashalg_context *ctx = crypto_tfm_ctx(areq->base.tfm); in artpec6_crypto_prepare_hash()
1316 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(areq); in artpec6_crypto_prepare_hash()
1317 size_t digestsize = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq)); in artpec6_crypto_prepare_hash()
1321 crypto_ahash_tfm(crypto_ahash_reqtfm(areq))); in artpec6_crypto_prepare_hash()
1407 size_t total_bytes = areq->nbytes + req_ctx->partial_bytes; in artpec6_crypto_prepare_hash()
1433 artpec6_crypto_walk_init(&walk, areq->src); in artpec6_crypto_prepare_hash()
[all …]
/Linux-v4.19/drivers/crypto/qat/qat_common/
qat_algs.c
786 struct aead_request *areq = qat_req->aead_req; in qat_aead_alg_callback() local
793 areq->base.complete(&areq->base, res); in qat_aead_alg_callback()
801 struct ablkcipher_request *areq = qat_req->ablkcipher_req; in qat_ablkcipher_alg_callback() local
808 areq->base.complete(&areq->base, res); in qat_ablkcipher_alg_callback()
820 static int qat_alg_aead_dec(struct aead_request *areq) in qat_alg_aead_dec() argument
822 struct crypto_aead *aead_tfm = crypto_aead_reqtfm(areq); in qat_alg_aead_dec()
825 struct qat_crypto_request *qat_req = aead_request_ctx(areq); in qat_alg_aead_dec()
832 ret = qat_alg_sgl_to_bufl(ctx->inst, areq->src, areq->dst, qat_req); in qat_alg_aead_dec()
839 qat_req->aead_req = areq; in qat_alg_aead_dec()
845 cipher_param->cipher_length = areq->cryptlen - digst_size; in qat_alg_aead_dec()
[all …]
qat_asym_algs.c
180 } areq; member
188 struct kpp_request *areq = req->areq.dh; in qat_dh_cb() local
195 if (areq->src) { in qat_dh_cb()
204 areq->dst_len = req->ctx.dh->p_size; in qat_dh_cb()
206 scatterwalk_map_and_copy(req->dst_align, areq->dst, 0, in qat_dh_cb()
207 areq->dst_len, 1); in qat_dh_cb()
222 kpp_request_complete(areq, err); in qat_dh_cb()
287 qat_req->areq.dh = req; in qat_dh_compute_value()
556 struct akcipher_request *areq = req->areq.rsa; in qat_rsa_cb() local
570 areq->dst_len = req->ctx.rsa->key_sz; in qat_rsa_cb()
[all …]
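qat_asym_algs.c services both DH (kpp) and RSA (akcipher) with one request structure, so the caller's request is kept in a small union named areq and the right member is picked in each callback. Sketch (invented names):

#include <crypto/akcipher.h>
#include <crypto/kpp.h>

struct example_asym_request {
	union {
		struct kpp_request *dh;
		struct akcipher_request *rsa;
	} areq;				/* the caller's original request */
	int err;
};

static void example_dh_cb(struct example_asym_request *req)
{
	struct kpp_request *areq = req->areq.dh;

	/* ... copy the shared secret into areq->dst first ... */
	kpp_request_complete(areq, req->err);
}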
/Linux-v4.19/include/crypto/
engine.h
80 void *areq);
82 void *areq);
84 void *areq);
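engine.h declares these hooks with void *areq because crypto_engine dispatches several request types (skcipher, aead, hash, ...) through the same callbacks; each driver casts areq back to the request type it registered for. A sketch of a do_one_request-style hook for AEAD (invented name; assumes the v4.19 crypto_engine API):

#include <crypto/aead.h>
#include <crypto/engine.h>
#include <linux/kernel.h>

static int example_aead_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);

	/*
	 * ... program the hardware from req->src/dst/cryptlen; when the
	 * operation finishes, call crypto_finalize_aead_request(engine,
	 * req, err) to complete it ...
	 */
	return 0;
}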
/Linux-v4.19/drivers/crypto/caam/
jr.h
16 void *areq),
17 void *areq);
/Linux-v4.19/drivers/crypto/ux500/cryp/
cryp_core.c
825 static int ablk_dma_crypt(struct ablkcipher_request *areq) in ablk_dma_crypt() argument
827 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); in ablk_dma_crypt()
837 ctx->datalen = areq->nbytes; in ablk_dma_crypt()
838 ctx->outlen = areq->nbytes; in ablk_dma_crypt()
849 ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen); in ablk_dma_crypt()
850 ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen); in ablk_dma_crypt()
855 bytes_written = cryp_dma_write(ctx, areq->src, ctx->datalen); in ablk_dma_crypt()
856 bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written); in ablk_dma_crypt()
882 static int ablk_crypt(struct ablkcipher_request *areq) in ablk_crypt() argument
885 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq); in ablk_crypt()
[all …]
