/Linux-v5.4/crypto/ |
D | authencesn.c |
    107  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_esn_genicv_tail() local
    113  scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);  in crypto_authenc_esn_genicv_tail()
    116  scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1);  in crypto_authenc_esn_genicv_tail()
    141  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_esn_genicv() local
    151  scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);  in crypto_authenc_esn_genicv()
    157  ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen);  in crypto_authenc_esn_genicv()
    200  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_esn_encrypt() local
    220  skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);  in crypto_authenc_esn_encrypt()
    241  unsigned int cryptlen = req->cryptlen - authsize;  in crypto_authenc_esn_decrypt_tail() local
    252  scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);  in crypto_authenc_esn_decrypt_tail()
    [all …]
|
D | keywrap.c |
    129  u64 t = 6 * ((req->cryptlen) >> 3);  in crypto_kw_decrypt()
    137  if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)  in crypto_kw_decrypt()
    153  unsigned int nbytes = req->cryptlen;  in crypto_kw_decrypt()
    207  if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)  in crypto_kw_encrypt()
    226  unsigned int nbytes = req->cryptlen;  in crypto_kw_encrypt()
|
D | chacha20poly1305.c |
    41   __le64 cryptlen;  member
    61   unsigned int cryptlen;  member
    103  req->assoclen + rctx->cryptlen,  in poly_verify_tag()
    115  req->assoclen + rctx->cryptlen,  in poly_copy_tag()
    133  if (rctx->cryptlen == 0)  in chacha_decrypt()
    147  rctx->cryptlen, creq->iv);  in chacha_decrypt()
    160  if (rctx->cryptlen == req->cryptlen) /* encrypting */  in poly_tail_continue()
    180  preq->tail.cryptlen = cpu_to_le64(rctx->cryptlen);  in poly_tail()
    209  padlen = -rctx->cryptlen % POLY1305_BLOCK_SIZE;  in poly_cipherpad()
    238  if (rctx->cryptlen == req->cryptlen) /* encrypting */  in poly_cipher()
    [all …]
|
D | aegis128-core.c |
    349  u64 assoclen, u64 cryptlen)  in crypto_aegis128_final() argument
    352  u64 cryptbits = cryptlen * 8;  in crypto_aegis128_final()
    395  unsigned int cryptlen,  in crypto_aegis128_crypt() argument
    405  crypto_aegis128_final(&state, tag_xor, req->assoclen, cryptlen);  in crypto_aegis128_crypt()
    418  unsigned int cryptlen = req->cryptlen;  in crypto_aegis128_encrypt() local
    425  crypto_aegis128_crypt(req, &tag, cryptlen, ops);  in crypto_aegis128_encrypt()
    427  scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,  in crypto_aegis128_encrypt()
    443  unsigned int cryptlen = req->cryptlen - authsize;  in crypto_aegis128_decrypt() local
    445  scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,  in crypto_aegis128_decrypt()
    453  crypto_aegis128_crypt(req, &tag, cryptlen, ops);  in crypto_aegis128_decrypt()
|
D | ccm.c |
    136  unsigned int cryptlen)  in format_input() argument
    154  return set_msg_len(info + 16 - l, cryptlen, l);  in format_input()
    177  unsigned int cryptlen)  in crypto_ccm_auth() argument
    190  err = format_input(odata, req, cryptlen);  in crypto_ccm_auth()
    226  cryptlen += ilen;  in crypto_ccm_auth()
    229  ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen);  in crypto_ccm_auth()
    244  req->assoclen + req->cryptlen,  in crypto_ccm_encrypt_done()
    300  unsigned int cryptlen = req->cryptlen;  in crypto_ccm_encrypt() local
    309  err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);  in crypto_ccm_encrypt()
    320  skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);  in crypto_ccm_encrypt()
    [all …]
|
D | echainiv.c |
    37   if (req->cryptlen < ivsize)  in echainiv_encrypt()
    51   req->assoclen + req->cryptlen,  in echainiv_encrypt()
    62   req->cryptlen, info);  in echainiv_encrypt()
    94   if (req->cryptlen < ivsize)  in echainiv_decrypt()
    104  req->cryptlen - ivsize, req->iv);  in echainiv_decrypt()
|
D | gcm.c |
    59   unsigned int cryptlen;  member
    184  unsigned int cryptlen)  in crypto_gcm_init_crypt() argument
    196  cryptlen + sizeof(pctx->auth_tag),  in crypto_gcm_init_crypt()
    237  lengths.b = cpu_to_be64(gctx->cryptlen * 8);  in gcm_hash_len()
    298  remain = gcm_remain(gctx->cryptlen);  in gcm_hash_crypt_continue()
    327  if (gctx->cryptlen)  in gcm_hash_assoc_remain_continue()
    329  gctx->src, gctx->cryptlen, flags) ?:  in gcm_hash_assoc_remain_continue()
    425  req->assoclen + req->cryptlen,  in gcm_enc_copy_hash()
    436  gctx->cryptlen = req->cryptlen;  in gcm_encrypt_continue()
    464  crypto_gcm_init_crypt(req, req->cryptlen);  in crypto_gcm_encrypt()
    [all …]
|
D | seqiv.c |
    61   if (req->cryptlen < ivsize)  in seqiv_aead_encrypt()
    77   req->assoclen + req->cryptlen,  in seqiv_aead_encrypt()
    99   req->cryptlen - ivsize, info);  in seqiv_aead_encrypt()
    120  if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))  in seqiv_aead_decrypt()
    130  req->cryptlen - ivsize, req->iv);  in seqiv_aead_decrypt()
|
D | xts.c |
    91   const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);  in xor_tweak()
    167  int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);  in cts_final()
    170  int tail = req->cryptlen % XTS_BLOCK_SIZE;  in cts_final()
    211  if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {  in encrypt_done()
    231  if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {  in decrypt_done()
    247  if (req->cryptlen < XTS_BLOCK_SIZE)  in init_crypt()
    253  req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);  in init_crypt()
    272  if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))  in encrypt()
    289  if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))  in decrypt()
|
D | aead.c |
    92   unsigned int cryptlen = req->cryptlen;  in crypto_aead_encrypt() local
    100  crypto_stats_aead_encrypt(cryptlen, alg, ret);  in crypto_aead_encrypt()
    109  unsigned int cryptlen = req->cryptlen;  in crypto_aead_decrypt() local
    115  else if (req->cryptlen < crypto_aead_authsize(aead))  in crypto_aead_decrypt()
    119  crypto_stats_aead_decrypt(cryptlen, alg, ret);  in crypto_aead_decrypt()
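Note: the generic AEAD entry points above record req->cryptlen for statistics and, on decryption, reject requests shorter than the authentication tag. By convention req->cryptlen covers only the plaintext on encryption but the ciphertext plus tag on decryption, which is why so many decrypt paths in this listing compute cryptlen = req->cryptlen - authsize. Below is a minimal, hedged sketch of a caller supplying cryptlen through this API (kernel-module style; the "gcm(aes)" name, 16-byte key, single linear buffer and abbreviated error handling are illustrative assumptions, not taken from the files above):

	#include <linux/crypto.h>
	#include <linux/err.h>
	#include <linux/scatterlist.h>
	#include <crypto/aead.h>

	/* buf holds AAD || plaintext, with 16 spare bytes for the GCM tag */
	static int example_gcm_encrypt(u8 *buf, unsigned int assoclen,
				       unsigned int ptlen, const u8 *key, u8 *iv)
	{
		struct crypto_aead *tfm;
		struct aead_request *req;
		struct scatterlist sg;
		DECLARE_CRYPTO_WAIT(wait);
		int err;

		tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		err = crypto_aead_setkey(tfm, key, 16) ?:
		      crypto_aead_setauthsize(tfm, 16);
		if (err)
			goto out_free_tfm;

		req = aead_request_alloc(tfm, GFP_KERNEL);
		if (!req) {
			err = -ENOMEM;
			goto out_free_tfm;
		}

		sg_init_one(&sg, buf, assoclen + ptlen + 16);
		aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
					  crypto_req_done, &wait);
		aead_request_set_ad(req, assoclen);
		/* encrypt: cryptlen is the plaintext length only */
		aead_request_set_crypt(req, &sg, &sg, ptlen, iv);
		err = crypto_wait_req(crypto_aead_encrypt(req), &wait);

		/*
		 * For the matching decrypt call, cryptlen would instead be
		 * ptlen + 16 (ciphertext plus tag), matching the authsize
		 * subtraction seen in the decrypt paths above.
		 */
		aead_request_free(req);
	out_free_tfm:
		crypto_free_aead(tfm);
		return err;
	}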
|
D | authenc.c |
    135  req->assoclen + req->cryptlen,  in authenc_geniv_ahash_done()
    159  req->assoclen + req->cryptlen);  in crypto_authenc_genicv()
    167  scatterwalk_map_and_copy(hash, req->dst, req->assoclen + req->cryptlen,  in crypto_authenc_genicv()
    210  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_encrypt() local
    230  skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);  in crypto_authenc_encrypt()
    269  req->cryptlen - authsize, req->iv);  in crypto_authenc_decrypt_tail()
    306  req->assoclen + req->cryptlen - authsize);  in crypto_authenc_decrypt()
|
D | skcipher.c |
    453  walk->total = req->cryptlen;  in skcipher_walk_skcipher()
    555  walk->total = req->cryptlen;  in skcipher_walk_aead()
    564  walk->total = req->cryptlen;  in skcipher_walk_aead_encrypt()
    575  walk->total = req->cryptlen - crypto_aead_authsize(tfm);  in skcipher_walk_aead_decrypt()
    635  return crypt(&desc, req->dst, req->src, req->cryptlen);  in skcipher_crypt_blkcipher()
    731  ablkcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,  in skcipher_crypt_ablkcipher()
    848  unsigned int cryptlen = req->cryptlen;  in crypto_skcipher_encrypt() local
    856  crypto_stats_skcipher_encrypt(cryptlen, ret, alg);  in crypto_skcipher_encrypt()
    865  unsigned int cryptlen = req->cryptlen;  in crypto_skcipher_decrypt() local
    873  crypto_stats_skcipher_decrypt(cryptlen, ret, alg);  in crypto_skcipher_decrypt()
|
/Linux-v5.4/arch/x86/crypto/ |
D | aegis128-aesni-glue.c |
    44   void *state, void *tag_xor, unsigned int cryptlen,
    169  unsigned int cryptlen,  in crypto_aegis128_aesni_crypt() argument
    184  crypto_aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen);  in crypto_aegis128_aesni_crypt()
    200  unsigned int cryptlen = req->cryptlen;  in crypto_aegis128_aesni_encrypt() local
    202  crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS);  in crypto_aegis128_aesni_encrypt()
    205  req->assoclen + cryptlen, authsize, 1);  in crypto_aegis128_aesni_encrypt()
    222  unsigned int cryptlen = req->cryptlen - authsize;  in crypto_aegis128_aesni_decrypt() local
    225  req->assoclen + cryptlen, authsize, 0);  in crypto_aegis128_aesni_decrypt()
    227  crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS);  in crypto_aegis128_aesni_decrypt()
|
D | glue_helper.c |
    265  const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);  in glue_xts_req_128bit()
    273  if (req->cryptlen < XTS_BLOCK_SIZE)  in glue_xts_req_128bit()
    279  tail = req->cryptlen % XTS_BLOCK_SIZE + XTS_BLOCK_SIZE;  in glue_xts_req_128bit()
    286  req->cryptlen - tail, req->iv);  in glue_xts_req_128bit()
    316  dst = src = scatterwalk_ffwd(s, req->src, req->cryptlen);  in glue_xts_req_128bit()
    318  dst = scatterwalk_ffwd(d, req->dst, req->cryptlen);  in glue_xts_req_128bit()
|
/Linux-v5.4/arch/arm/crypto/ |
D | aes-ce-glue.c |
    281  int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;  in cts_cbc_encrypt()
    292  if (req->cryptlen <= AES_BLOCK_SIZE) {  in cts_cbc_encrypt()
    293  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_encrypt()
    308  if (req->cryptlen == AES_BLOCK_SIZE)  in cts_cbc_encrypt()
    311  dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);  in cts_cbc_encrypt()
    314  subreq.cryptlen);  in cts_cbc_encrypt()
    319  req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,  in cts_cbc_encrypt()
    339  int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;  in cts_cbc_decrypt()
    350  if (req->cryptlen <= AES_BLOCK_SIZE) {  in cts_cbc_decrypt()
    351  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_decrypt()
    [all …]
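Note: the CTS-CBC handlers above (and the arm64 copies in aes-glue.c further down) split req->cryptlen into full blocks handled by plain CBC plus a one-to-two-block tail handled by the ciphertext-stealing subrequest. A standalone arithmetic sketch of that split (userspace C; the sample lengths are made up, only the DIV_ROUND_UP expression is taken from the matches above):

	#include <stdio.h>

	#define AES_BLOCK_SIZE 16
	#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

	int main(void)
	{
		unsigned int lens[] = { 16, 17, 32, 48, 100 };
		unsigned int i;

		for (i = 0; i < sizeof(lens) / sizeof(lens[0]); i++) {
			/* blocks processed by plain CBC before the CTS step */
			int cbc_blocks = DIV_ROUND_UP(lens[i], AES_BLOCK_SIZE) - 2;
			/* bytes left for the ciphertext-stealing tail */
			int tail = lens[i] -
				   (cbc_blocks > 0 ? cbc_blocks : 0) * AES_BLOCK_SIZE;

			printf("cryptlen=%3u -> cbc_blocks=%2d, cts tail=%3d bytes\n",
			       lens[i], cbc_blocks, tail);
		}
		return 0;
	}

A non-positive cbc_blocks corresponds to the short-request cases checked above (cryptlen of at most two blocks), where the whole request goes straight to the single-block or CTS path.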
|
/Linux-v5.4/drivers/crypto/ |
D | omap-aes-gcm.c |
    90   int alen, clen, cryptlen, assoclen, ret;  in omap_aes_gcm_copy_buffers() local
    98   cryptlen = req->cryptlen;  in omap_aes_gcm_copy_buffers()
    104  cryptlen -= authlen;  in omap_aes_gcm_copy_buffers()
    107  clen = ALIGN(cryptlen, AES_BLOCK_SIZE);  in omap_aes_gcm_copy_buffers()
    109  nsg = !!(assoclen && cryptlen);  in omap_aes_gcm_copy_buffers()
    125  if (cryptlen) {  in omap_aes_gcm_copy_buffers()
    128  ret = omap_crypto_align_sg(&tmp, cryptlen,  in omap_aes_gcm_copy_buffers()
    138  dd->total = cryptlen;  in omap_aes_gcm_copy_buffers()
    151  ret = omap_crypto_align_sg(&dd->out_sg, cryptlen,  in omap_aes_gcm_copy_buffers()
    321  if (assoclen + req->cryptlen == 0) {  in omap_aes_gcm_crypt()
|
/Linux-v5.4/drivers/crypto/cavium/nitrox/ |
D | nitrox_aead.c |
    149  creq->gph.param0 = cpu_to_be16(rctx->cryptlen);  in nitrox_set_creq()
    150  creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);  in nitrox_set_creq()
    200  rctx->cryptlen = areq->cryptlen;  in nitrox_aes_gcm_enc()
    202  rctx->srclen = areq->assoclen + areq->cryptlen;  in nitrox_aes_gcm_enc()
    231  rctx->cryptlen = areq->cryptlen - aead->authsize;  in nitrox_aes_gcm_dec()
    233  rctx->srclen = areq->cryptlen + areq->assoclen;  in nitrox_aes_gcm_dec()
    419  aead_rctx->cryptlen = areq->cryptlen;  in nitrox_rfc4106_enc()
    421  aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen;  in nitrox_rfc4106_enc()
    451  aead_rctx->cryptlen = areq->cryptlen - aead->authsize;  in nitrox_rfc4106_dec()
    454  areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen;  in nitrox_rfc4106_dec()
|
/Linux-v5.4/drivers/crypto/sunxi-ss/ |
D | sun4i-ss-cipher.c |
    30   unsigned int ileft = areq->cryptlen;  in sun4i_ss_opti_poll()
    31   unsigned int oleft = areq->cryptlen;  in sun4i_ss_opti_poll()
    37   if (!areq->cryptlen)  in sun4i_ss_opti_poll()
    70   ileft = areq->cryptlen / 4;  in sun4i_ss_opti_poll()
    71   oleft = areq->cryptlen / 4;  in sun4i_ss_opti_poll()
    130  areq->cryptlen, areq->iv);  in sun4i_ss_cipher_poll_fallback()
    161  unsigned int ileft = areq->cryptlen;  in sun4i_ss_cipher_poll()
    162  unsigned int oleft = areq->cryptlen;  in sun4i_ss_cipher_poll()
    172  if (!areq->cryptlen)  in sun4i_ss_cipher_poll()
    181  if (areq->cryptlen % algt->alg.crypto.base.cra_blocksize)  in sun4i_ss_cipher_poll()
    [all …]
|
/Linux-v5.4/arch/arm64/crypto/ |
D | aes-glue.c |
    294  int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;  in cts_cbc_encrypt()
    304  if (req->cryptlen <= AES_BLOCK_SIZE) {  in cts_cbc_encrypt()
    305  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_encrypt()
    320  if (req->cryptlen == AES_BLOCK_SIZE)  in cts_cbc_encrypt()
    323  dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);  in cts_cbc_encrypt()
    326  subreq.cryptlen);  in cts_cbc_encrypt()
    331  req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,  in cts_cbc_encrypt()
    351  int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;  in cts_cbc_decrypt()
    361  if (req->cryptlen <= AES_BLOCK_SIZE) {  in cts_cbc_decrypt()
    362  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_decrypt()
    [all …]
|
/Linux-v5.4/include/crypto/ |
D | aead.h |
    85   unsigned int cryptlen;  member
    483  unsigned int cryptlen, u8 *iv)  in aead_request_set_crypt() argument
    487  req->cryptlen = cryptlen;  in aead_request_set_crypt()
|
D | skcipher.h |
    25   unsigned int cryptlen;  member
    580  unsigned int cryptlen, void *iv)  in skcipher_request_set_crypt() argument
    584  req->cryptlen = cryptlen;  in skcipher_request_set_crypt()
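Note: both setters above simply record the caller's cryptlen in the request; for a plain skcipher it is the number of bytes to transform, and block modes such as CBC require it to be a multiple of the block size, which is why several drivers in this listing check alignment against cra_blocksize or AES_BLOCK_SIZE. A minimal, hedged sketch of the skcipher side (the "cbc(aes)" name, 16-byte key and single linear buffer are illustrative assumptions):

	#include <linux/crypto.h>
	#include <linux/err.h>
	#include <linux/scatterlist.h>
	#include <crypto/skcipher.h>

	static int example_cbc_encrypt(u8 *buf, unsigned int len,
				       const u8 *key, u8 *iv)
	{
		struct crypto_skcipher *tfm;
		struct skcipher_request *req;
		struct scatterlist sg;
		DECLARE_CRYPTO_WAIT(wait);
		int err;

		/* plain CBC needs a whole number of 16-byte blocks */
		if (len % 16)
			return -EINVAL;

		tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
		if (IS_ERR(tfm))
			return PTR_ERR(tfm);

		err = crypto_skcipher_setkey(tfm, key, 16);
		if (err)
			goto out_free_tfm;

		req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!req) {
			err = -ENOMEM;
			goto out_free_tfm;
		}

		sg_init_one(&sg, buf, len);
		skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
					      crypto_req_done, &wait);
		/* cryptlen is just the byte count to encrypt in place */
		skcipher_request_set_crypt(req, &sg, &sg, len, iv);
		err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

		skcipher_request_free(req);
	out_free_tfm:
		crypto_free_skcipher(tfm);
		return err;
	}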
|
/Linux-v5.4/drivers/crypto/amcc/ |
D | crypto4xx_alg.c |
    77   if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))  in crypto4xx_crypt()
    84   req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,  in crypto4xx_crypt()
    236  req->cryptlen, iv, AES_IV_SIZE,  in crypto4xx_rfc3686_encrypt()
    251  req->cryptlen, iv, AES_IV_SIZE,  in crypto4xx_rfc3686_decrypt()
    262  unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /  in crypto4xx_ctr_crypt()
    279  req->cryptlen, req->iv);  in crypto4xx_ctr_crypt()
    370  aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,  in crypto4xx_aead_fallback()
    468  unsigned int len = req->cryptlen;  in crypto4xx_crypt_aes_ccm()
    614  unsigned int len = req->cryptlen;  in crypto4xx_crypt_aes_gcm()
|
/Linux-v5.4/drivers/crypto/inside-secure/ |
D | safexcel_cipher.c |
    147  u32 cryptlen, u32 assoclen, u32 digestsize)  in safexcel_aead_token() argument
    165  cryptlen -= digestsize;  in safexcel_aead_token()
    188  if (likely(cryptlen)) {  in safexcel_aead_token()
    192  token[10].packet_length = cryptlen;  in safexcel_aead_token()
    236  cbcmaciv[14] = cryptlen >> 8;  in safexcel_aead_token()
    237  cbcmaciv[15] = cryptlen & 255;  in safexcel_aead_token()
    251  if (likely(cryptlen)) {  in safexcel_aead_token()
    258  cryptlen &= 15;  in safexcel_aead_token()
    259  token[11].packet_length = cryptlen ? 16 - cryptlen : 0;  in safexcel_aead_token()
    492  unsigned int cryptlen,  in safexcel_handle_req_result() argument
    [all …]
|
/Linux-v5.4/drivers/crypto/marvell/ |
D | cipher.c |
    42   mv_cesa_req_dma_iter_init(&iter->base, req->cryptlen);  in mv_cesa_skcipher_req_iter_init()
    86   size_t len = min_t(size_t, req->cryptlen - sreq->offset,  in mv_cesa_skcipher_std_step()
    127  if (sreq->offset < req->cryptlen)  in mv_cesa_skcipher_std_process()
    205  atomic_sub(skreq->cryptlen, &engine->load);  in mv_cesa_skcipher_complete()
    418  if (!IS_ALIGNED(req->cryptlen, blksize))  in mv_cesa_skcipher_req_init()
    421  creq->src_nents = sg_nents_for_len(req->src, req->cryptlen);  in mv_cesa_skcipher_req_init()
    426  creq->dst_nents = sg_nents_for_len(req->dst, req->cryptlen);  in mv_cesa_skcipher_req_init()
    454  engine = mv_cesa_select_engine(req->cryptlen);  in mv_cesa_skcipher_queue_req()
|
/Linux-v5.4/drivers/crypto/ccree/ |
D | cc_aead.c |
    240  areq->cryptlen, 0);  in cc_aead_complete()
    245  u32 skip = areq->cryptlen + areq_ctx->dst_offset;  in cc_aead_complete()
    749  areq_ctx->cryptlen > 0)  in cc_set_assoc_desc()
    759  areq_ctx->cryptlen > 0)  in cc_set_assoc_desc()
    796  areq_ctx->cryptlen, NS_BIT);  in cc_proc_authen_desc()
    846  if (areq_ctx->cryptlen == 0)  in cc_proc_cipher_desc()
    855  areq_ctx->src_offset), areq_ctx->cryptlen,  in cc_proc_cipher_desc()
    860  areq_ctx->cryptlen, NS_BIT, 0);  in cc_proc_cipher_desc()
    982  if (req_ctx->cryptlen == 0)  in cc_proc_cipher()
    1329 (req->cryptlen - ctx->authsize) : req->cryptlen;  in validate_data_size()
    [all …]
|