/Linux-v5.10/crypto/ |
D | authencesn.c |
    97  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_esn_genicv_tail() local
   103  scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);  in crypto_authenc_esn_genicv_tail()
   106  scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1);  in crypto_authenc_esn_genicv_tail()
   131  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_esn_genicv() local
   141  scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);  in crypto_authenc_esn_genicv()
   147  ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen);  in crypto_authenc_esn_genicv()
   190  unsigned int cryptlen = req->cryptlen;  in crypto_authenc_esn_encrypt() local
   210  skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);  in crypto_authenc_esn_encrypt()
   231  unsigned int cryptlen = req->cryptlen - authsize;  in crypto_authenc_esn_decrypt_tail() local
   242  scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);  in crypto_authenc_esn_decrypt_tail()
   [all …]
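A pattern repeated across these hits is reading or writing the authentication tag at offset assoclen + cryptlen with scatterwalk_map_and_copy(). A minimal sketch of the write side, assuming a v5.10 kernel context; the helper name my_write_tag is illustrative:

    #include <crypto/aead.h>
    #include <crypto/scatterwalk.h>

    /* Append the authsize-byte tag right after AAD + ciphertext in the
     * destination scatterlist; out = 1 copies buffer -> scatterlist. */
    static void my_write_tag(struct aead_request *req, u8 *tag,
                             unsigned int authsize)
    {
            scatterwalk_map_and_copy(tag, req->dst,
                                     req->assoclen + req->cryptlen,
                                     authsize, 1);
    }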
|
D | keywrap.c |
   129  u64 t = 6 * ((req->cryptlen) >> 3);  in crypto_kw_decrypt()
   137  if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)  in crypto_kw_decrypt()
   153  unsigned int nbytes = req->cryptlen;  in crypto_kw_decrypt()
   207  if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)  in crypto_kw_encrypt()
   226  unsigned int nbytes = req->cryptlen;  in crypto_kw_encrypt()
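Both crypto_kw_encrypt() and crypto_kw_decrypt() reject inputs shorter than two 8-byte semiblocks or not semiblock-aligned, as RFC 3394 requires. A hedged restatement of the guard at lines 137/207:

    #define SEMIBSIZE 8  /* one RFC 3394 semiblock */

    /* Key wrap needs at least two semiblocks, in whole multiples. */
    static int kw_cryptlen_valid(unsigned int cryptlen)
    {
            return cryptlen >= 2 * SEMIBSIZE && cryptlen % SEMIBSIZE == 0;
    }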
|
D | chacha20poly1305.c |
    39  __le64 cryptlen;  member
    59  unsigned int cryptlen;  member
   101  req->assoclen + rctx->cryptlen,  in poly_verify_tag()
   113  req->assoclen + rctx->cryptlen,  in poly_copy_tag()
   131  if (rctx->cryptlen == 0)  in chacha_decrypt()
   145  rctx->cryptlen, creq->iv);  in chacha_decrypt()
   158  if (rctx->cryptlen == req->cryptlen) /* encrypting */  in poly_tail_continue()
   178  preq->tail.cryptlen = cpu_to_le64(rctx->cryptlen);  in poly_tail()
   207  padlen = -rctx->cryptlen % POLY1305_BLOCK_SIZE;  in poly_cipherpad()
   236  if (rctx->cryptlen == req->cryptlen) /* encrypting */  in poly_cipher()
   [all …]
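The expression at line 207 leans on unsigned wraparound: -cryptlen % 16 yields the zero padding needed to bring the ciphertext to a Poly1305 block boundary, per RFC 8439. A standalone sketch:

    #define POLY1305_BLOCK_SIZE 16

    /* 0 if cryptlen is 16-byte aligned, else 16 - (cryptlen % 16);
     * unsigned negation makes the wraparound well defined. */
    static unsigned int poly_padlen(unsigned int cryptlen)
    {
            return -cryptlen % POLY1305_BLOCK_SIZE;
    }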
|
D | aegis128-core.c |
    72  u64 assoclen, u64 cryptlen);
   350  u64 assoclen, u64 cryptlen)  in crypto_aegis128_final() argument
   353  u64 cryptbits = cryptlen * 8;  in crypto_aegis128_final()
   398  unsigned int cryptlen = req->cryptlen;  in crypto_aegis128_encrypt() local
   409  cryptlen);  in crypto_aegis128_encrypt()
   415  crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);  in crypto_aegis128_encrypt()
   418  scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,  in crypto_aegis128_encrypt()
   429  unsigned int cryptlen = req->cryptlen - authsize;  in crypto_aegis128_decrypt() local
   434  scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,  in crypto_aegis128_decrypt()
   444  cryptlen);  in crypto_aegis128_decrypt()
   [all …]
|
D | ccm.c |
   127  unsigned int cryptlen)  in format_input() argument
   145  return set_msg_len(info + 16 - l, cryptlen, l);  in format_input()
   168  unsigned int cryptlen)  in crypto_ccm_auth() argument
   181  err = format_input(odata, req, cryptlen);  in crypto_ccm_auth()
   217  cryptlen += ilen;  in crypto_ccm_auth()
   220  ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen);  in crypto_ccm_auth()
   235  req->assoclen + req->cryptlen,  in crypto_ccm_encrypt_done()
   291  unsigned int cryptlen = req->cryptlen;  in crypto_ccm_encrypt() local
   300  err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);  in crypto_ccm_encrypt()
   311  skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);  in crypto_ccm_encrypt()
   [all …]
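format_input() at line 145 stores cryptlen big-endian into the trailing l bytes of the CCM B0 block, failing when the length field is too narrow. A hedged userspace approximation of set_msg_len() (the name my_set_msg_len and the -1 error code are illustrative):

    #include <stdint.h>
    #include <string.h>
    #include <arpa/inet.h>  /* htonl */

    /* Write msglen big-endian into the last bytes of block[0..csize). */
    static int my_set_msg_len(uint8_t *block, unsigned int msglen, int csize)
    {
            uint32_t be = htonl(msglen);
            int n = csize >= 4 ? 4 : csize;

            if (csize < 4 && msglen >= (1u << (8 * csize)))
                    return -1;  /* does not fit in csize bytes */

            memset(block, 0, csize);
            memcpy(block + csize - n, (uint8_t *)&be + 4 - n, n);
            return 0;
    }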
|
D | gcm.c |
    58  unsigned int cryptlen;  member
   178  unsigned int cryptlen)  in crypto_gcm_init_crypt() argument
   190  cryptlen + sizeof(pctx->auth_tag),  in crypto_gcm_init_crypt()
   231  lengths.b = cpu_to_be64(gctx->cryptlen * 8);  in gcm_hash_len()
   292  remain = gcm_remain(gctx->cryptlen);  in gcm_hash_crypt_continue()
   321  if (gctx->cryptlen)  in gcm_hash_assoc_remain_continue()
   323  gctx->src, gctx->cryptlen, flags) ?:  in gcm_hash_assoc_remain_continue()
   419  req->assoclen + req->cryptlen,  in gcm_enc_copy_hash()
   430  gctx->cryptlen = req->cryptlen;  in gcm_encrypt_continue()
   458  crypto_gcm_init_crypt(req, req->cryptlen);  in crypto_gcm_encrypt()
   [all …]
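gcm_hash_len() (line 231) finishes GHASH with a 16-byte block holding the big-endian bit counts of the AAD and the ciphertext. A hedged userspace sketch of building that block (my_gcm_lengths is illustrative):

    #include <stdint.h>
    #include <endian.h>  /* htobe64 (glibc) */

    /* The final GHASH block: AAD bits, then ciphertext bits. */
    struct my_gcm_lengths {
            uint64_t a;  /* assoclen * 8, big-endian */
            uint64_t b;  /* cryptlen * 8, big-endian */
    };

    static void my_fill_lengths(struct my_gcm_lengths *len,
                                uint64_t assoclen, uint64_t cryptlen)
    {
            len->a = htobe64(assoclen * 8);
            len->b = htobe64(cryptlen * 8);
    }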
|
D | echainiv.c |
    37  if (req->cryptlen < ivsize)  in echainiv_encrypt()
    51  req->assoclen + req->cryptlen,  in echainiv_encrypt()
    62  req->cryptlen, info);  in echainiv_encrypt()
    94  if (req->cryptlen < ivsize)  in echainiv_decrypt()
   104  req->cryptlen - ivsize, req->iv);  in echainiv_decrypt()
|
D | aead.c |
    87  unsigned int cryptlen = req->cryptlen;  in crypto_aead_encrypt() local
    95  crypto_stats_aead_encrypt(cryptlen, alg, ret);  in crypto_aead_encrypt()
   104  unsigned int cryptlen = req->cryptlen;  in crypto_aead_decrypt() local
   110  else if (req->cryptlen < crypto_aead_authsize(aead))  in crypto_aead_decrypt()
   114  crypto_stats_aead_decrypt(cryptlen, alg, ret);  in crypto_aead_decrypt()
|
D | aegis128-neon.c |
    18  uint64_t cryptlen);
    65  u64 assoclen, u64 cryptlen)  in crypto_aegis128_final_simd() argument
    68  crypto_aegis128_final_neon(state, tag_xor, assoclen, cryptlen);  in crypto_aegis128_final_simd()
|
D | seqiv.c |
    59  if (req->cryptlen < ivsize)  in seqiv_aead_encrypt()
    75  req->assoclen + req->cryptlen,  in seqiv_aead_encrypt()
    97  req->cryptlen - ivsize, info);  in seqiv_aead_encrypt()
   118  if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))  in seqiv_aead_decrypt()
   128  req->cryptlen - ivsize, req->iv);  in seqiv_aead_decrypt()
|
D | xts.c |
    86  const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);  in xts_xor_tweak()
   163  int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);  in xts_cts_final()
   166  int tail = req->cryptlen % XTS_BLOCK_SIZE;  in xts_cts_final()
   208  if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {  in xts_encrypt_done()
   228  if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {  in xts_decrypt_done()
   246  if (req->cryptlen < XTS_BLOCK_SIZE)  in xts_init_crypt()
   252  req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);  in xts_init_crypt()
   271  if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))  in xts_encrypt()
   288  if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))  in xts_decrypt()
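Whenever cryptlen is not a multiple of XTS_BLOCK_SIZE, xts.c runs the aligned prefix as ordinary XTS and finishes the ragged tail with ciphertext stealing; lines 163/166 compute the two pieces. Restated as a standalone sketch (my_xts_split is illustrative):

    #define XTS_BLOCK_SIZE 16

    /* Split a request into the block-aligned bulk and the CTS tail. */
    static void my_xts_split(unsigned int cryptlen,
                             unsigned int *bulk, unsigned int *tail)
    {
            *bulk = cryptlen & ~(XTS_BLOCK_SIZE - 1);
            *tail = cryptlen % XTS_BLOCK_SIZE;
    }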
|
/Linux-v5.10/arch/x86/crypto/ |
D | aegis128-aesni-glue.c |
    44  void *state, void *tag_xor, unsigned int cryptlen,
   167  unsigned int cryptlen,  in crypto_aegis128_aesni_crypt() argument
   182  crypto_aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen);  in crypto_aegis128_aesni_crypt()
   198  unsigned int cryptlen = req->cryptlen;  in crypto_aegis128_aesni_encrypt() local
   200  crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS);  in crypto_aegis128_aesni_encrypt()
   203  req->assoclen + cryptlen, authsize, 1);  in crypto_aegis128_aesni_encrypt()
   220  unsigned int cryptlen = req->cryptlen - authsize;  in crypto_aegis128_aesni_decrypt() local
   223  req->assoclen + cryptlen, authsize, 0);  in crypto_aegis128_aesni_decrypt()
   225  crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS);  in crypto_aegis128_aesni_decrypt()
|
/Linux-v5.10/arch/arm/crypto/ |
D | aes-ce-glue.c |
   271  int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;  in cts_cbc_encrypt()
   282  if (req->cryptlen <= AES_BLOCK_SIZE) {  in cts_cbc_encrypt()
   283  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_encrypt()
   298  if (req->cryptlen == AES_BLOCK_SIZE)  in cts_cbc_encrypt()
   301  dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);  in cts_cbc_encrypt()
   304  subreq.cryptlen);  in cts_cbc_encrypt()
   309  req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,  in cts_cbc_encrypt()
   329  int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;  in cts_cbc_decrypt()
   340  if (req->cryptlen <= AES_BLOCK_SIZE) {  in cts_cbc_decrypt()
   341  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_decrypt()
   [all …]
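cbc_blocks at line 271 counts the whole blocks that plain CBC can process before the last two are rearranged for ciphertext stealing; the arm64 aes-glue.c entry below uses identical logic. A sketch of the computation, assuming kernel headers (my_cts_cbc_bulk_blocks is illustrative):

    #include <linux/kernel.h>  /* DIV_ROUND_UP */
    #include <crypto/aes.h>    /* AES_BLOCK_SIZE */

    /* Whole blocks handled by plain CBC before the CTS finale;
     * <= 0 when the request spans at most two blocks. */
    static int my_cts_cbc_bulk_blocks(unsigned int cryptlen)
    {
            return DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2;
    }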
|
/Linux-v5.10/drivers/crypto/ |
D | omap-aes-gcm.c |
    91  int alen, clen, cryptlen, assoclen, ret;  in omap_aes_gcm_copy_buffers() local
    99  cryptlen = req->cryptlen;  in omap_aes_gcm_copy_buffers()
   105  cryptlen -= authlen;  in omap_aes_gcm_copy_buffers()
   108  clen = ALIGN(cryptlen, AES_BLOCK_SIZE);  in omap_aes_gcm_copy_buffers()
   110  nsg = !!(assoclen && cryptlen);  in omap_aes_gcm_copy_buffers()
   128  if (cryptlen) {  in omap_aes_gcm_copy_buffers()
   134  ret = omap_crypto_align_sg(&tmp, cryptlen,  in omap_aes_gcm_copy_buffers()
   146  dd->total = cryptlen;  in omap_aes_gcm_copy_buffers()
   159  if (cryptlen) {  in omap_aes_gcm_copy_buffers()
   160  ret = omap_crypto_align_sg(&dd->out_sg, cryptlen,  in omap_aes_gcm_copy_buffers()
   [all …]
|
/Linux-v5.10/drivers/crypto/allwinner/sun8i-ce/ |
D | sun8i-ce-cipher.c |
    31  if (areq->cryptlen < crypto_skcipher_ivsize(tfm))  in sun8i_ce_cipher_need_fallback()
    34  if (areq->cryptlen == 0 || areq->cryptlen % 16)  in sun8i_ce_cipher_need_fallback()
    70  areq->cryptlen, areq->iv);  in sun8i_ce_cipher_fallback()
   101  areq->cryptlen,  in sun8i_ce_cipher_prepare()
   122  cet->t_dlen = cpu_to_le32(areq->cryptlen);  in sun8i_ce_cipher_prepare()
   124  cet->t_dlen = cpu_to_le32(areq->cryptlen / 4);  in sun8i_ce_cipher_prepare()
   165  offset = areq->cryptlen - ivsize;  in sun8i_ce_cipher_prepare()
   206  len = areq->cryptlen;  in sun8i_ce_cipher_prepare()
   212  areq->cryptlen, i, cet->t_src[i].len, sg->offset, todo);  in sun8i_ce_cipher_prepare()
   221  len = areq->cryptlen;  in sun8i_ce_cipher_prepare()
   [all …]
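offset = areq->cryptlen - ivsize at line 165 is the usual CBC-chaining trick: the final ciphertext block is saved as the next request's IV. sun8i-ss-cipher.c and amlogic-gxl-cipher.c below do the same. A hedged sketch (my_save_next_iv is illustrative; the driver guarantees cryptlen >= ivsize at line 31):

    #include <crypto/scatterwalk.h>
    #include <crypto/skcipher.h>

    /* Copy the last ciphertext block out of the destination
     * scatterlist (out = 0 reads sg -> buffer) to use as next IV. */
    static void my_save_next_iv(struct skcipher_request *areq, u8 *iv,
                                unsigned int ivsize)
    {
            scatterwalk_map_and_copy(iv, areq->dst,
                                     areq->cryptlen - ivsize, ivsize, 0);
    }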
|
/Linux-v5.10/drivers/crypto/cavium/nitrox/ |
D | nitrox_aead.c |
   166  creq->gph.param0 = cpu_to_be16(rctx->cryptlen);  in nitrox_set_creq()
   167  creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);  in nitrox_set_creq()
   228  rctx->cryptlen = areq->cryptlen;  in nitrox_aes_gcm_enc()
   230  rctx->srclen = areq->assoclen + areq->cryptlen;  in nitrox_aes_gcm_enc()
   262  rctx->cryptlen = areq->cryptlen - aead->authsize;  in nitrox_aes_gcm_dec()
   264  rctx->srclen = areq->cryptlen + areq->assoclen;  in nitrox_aes_gcm_dec()
   450  aead_rctx->cryptlen = areq->cryptlen;  in nitrox_rfc4106_enc()
   452  aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen;  in nitrox_rfc4106_enc()
   482  aead_rctx->cryptlen = areq->cryptlen - aead->authsize;  in nitrox_rfc4106_dec()
   485  areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen;  in nitrox_rfc4106_dec()
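Lines 228 and 262 make the AEAD length convention explicit: on encryption cryptlen is the plaintext length, while on decryption req->cryptlen also covers the tag, so authsize is subtracted. A minimal restatement (my_payload_len is illustrative):

    #include <crypto/aead.h>

    /* Bytes actually run through the cipher, per the AEAD convention. */
    static unsigned int my_payload_len(struct aead_request *areq,
                                       unsigned int authsize, bool enc)
    {
            return enc ? areq->cryptlen : areq->cryptlen - authsize;
    }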
|
/Linux-v5.10/arch/arm64/crypto/ |
D | aes-glue.c |
   276  int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;  in cts_cbc_encrypt()
   286  if (req->cryptlen <= AES_BLOCK_SIZE) {  in cts_cbc_encrypt()
   287  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_encrypt()
   302  if (req->cryptlen == AES_BLOCK_SIZE)  in cts_cbc_encrypt()
   305  dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);  in cts_cbc_encrypt()
   308  subreq.cryptlen);  in cts_cbc_encrypt()
   313  req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,  in cts_cbc_encrypt()
   333  int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;  in cts_cbc_decrypt()
   343  if (req->cryptlen <= AES_BLOCK_SIZE) {  in cts_cbc_decrypt()
   344  if (req->cryptlen < AES_BLOCK_SIZE)  in cts_cbc_decrypt()
   [all …]
|
/Linux-v5.10/drivers/crypto/allwinner/sun4i-ss/ |
D | sun4i-ss-cipher.c |
    30  unsigned int ileft = areq->cryptlen;  in sun4i_ss_opti_poll()
    31  unsigned int oleft = areq->cryptlen;  in sun4i_ss_opti_poll()
    37  if (!areq->cryptlen)  in sun4i_ss_opti_poll()
    70  ileft = areq->cryptlen / 4;  in sun4i_ss_opti_poll()
    71  oleft = areq->cryptlen / 4;  in sun4i_ss_opti_poll()
   131  areq->cryptlen, areq->iv);  in sun4i_ss_cipher_poll_fallback()
   161  unsigned int ileft = areq->cryptlen;  in sun4i_ss_cipher_poll()
   162  unsigned int oleft = areq->cryptlen;  in sun4i_ss_cipher_poll()
   172  if (!areq->cryptlen)  in sun4i_ss_cipher_poll()
   181  if (areq->cryptlen % algt->alg.crypto.base.cra_blocksize)  in sun4i_ss_cipher_poll()
   [all …]
|
/Linux-v5.10/drivers/crypto/allwinner/sun8i-ss/ |
D | sun8i-ss-cipher.c |
    28  if (areq->cryptlen == 0 || areq->cryptlen % 16)  in sun8i_ss_need_fallback()
    87  areq->cryptlen, areq->iv);  in sun8i_ss_cipher_fallback()
   115  areq->cryptlen,  in sun8i_ss_cipher()
   148  offset = areq->cryptlen - ivsize;  in sun8i_ss_cipher()
   187  len = areq->cryptlen;  in sun8i_ss_cipher()
   197  areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);  in sun8i_ss_cipher()
   209  len = areq->cryptlen;  in sun8i_ss_cipher()
   219  areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);  in sun8i_ss_cipher()
   248  offset = areq->cryptlen - ivsize;  in sun8i_ss_cipher()
|
/Linux-v5.10/drivers/crypto/qce/ |
D | skcipher.c |
    78  rctx->cryptlen = req->cryptlen;  in qce_skcipher_async_req_handle()
    84  rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen);  in qce_skcipher_async_req_handle()
    86  rctx->dst_nents = sg_nents_for_len(req->dst, req->cryptlen);  in qce_skcipher_async_req_handle()
   109  sg = qce_sgtable_add(&rctx->dst_tbl, req->dst, req->cryptlen);  in qce_skcipher_async_req_handle()
   146  ret = qce_start(async_req, tmpl->crypto_alg_type, req->cryptlen, 0);  in qce_skcipher_async_req_handle()
   236  req->cryptlen <= aes_sw_max_len) ||  in qce_skcipher_crypt()
   237  (IS_XTS(rctx->flags) && req->cryptlen > QCE_SECTOR_SIZE &&  in qce_skcipher_crypt()
   238  req->cryptlen % QCE_SECTOR_SIZE))) {  in qce_skcipher_crypt()
   245  req->dst, req->cryptlen, req->iv);  in qce_skcipher_crypt()
|
/Linux-v5.10/drivers/crypto/virtio/ |
D | virtio_crypto_algs.c |
   359  src_nents = sg_nents_for_len(req->src, req->cryptlen);  in __virtio_crypto_skcipher_do_req()
   401  cpu_to_le32(req->cryptlen);  in __virtio_crypto_skcipher_do_req()
   410  dst_len = min_t(unsigned int, req->cryptlen, dst_len);  in __virtio_crypto_skcipher_do_req()
   412  req->cryptlen, dst_len);  in __virtio_crypto_skcipher_do_req()
   414  if (unlikely(req->cryptlen + dst_len + ivsize +  in __virtio_crypto_skcipher_do_req()
   443  req->cryptlen - AES_BLOCK_SIZE,  in __virtio_crypto_skcipher_do_req()
   493  if (!req->cryptlen)  in virtio_crypto_skcipher_encrypt()
   495  if (req->cryptlen % AES_BLOCK_SIZE)  in virtio_crypto_skcipher_encrypt()
   518  if (!req->cryptlen)  in virtio_crypto_skcipher_decrypt()
   520  if (req->cryptlen % AES_BLOCK_SIZE)  in virtio_crypto_skcipher_decrypt()
   [all …]
|
/Linux-v5.10/drivers/crypto/amlogic/ |
D | amlogic-gxl-cipher.c |
    30  if (areq->cryptlen == 0)  in meson_cipher_need_fallback()
    75  areq->cryptlen, areq->iv);  in meson_cipher_do_fallback()
   108  areq->cryptlen,  in meson_cipher()
   131  if (ivsize > areq->cryptlen) {  in meson_cipher()
   132  dev_err(mc->dev, "invalid ivsize=%d vs len=%d\n", ivsize, areq->cryptlen);  in meson_cipher()
   144  offset = areq->cryptlen - ivsize;  in meson_cipher()
   205  len = areq->cryptlen;  in meson_cipher()
   250  areq->cryptlen - ivsize,  in meson_cipher()
|
/Linux-v5.10/drivers/crypto/xilinx/ |
D | zynqmp-aes-gcm.c |
    93  dma_size = req->cryptlen + ZYNQMP_AES_KEY_SIZE  in zynqmp_aes_aead_cipher()
    96  dma_size = req->cryptlen + GCM_AES_IV_SIZE;  in zynqmp_aes_aead_cipher()
   109  data_size = req->cryptlen;  in zynqmp_aes_aead_cipher()
   110  scatterwalk_map_and_copy(kbuf, req->src, 0, req->cryptlen, 0);  in zynqmp_aes_aead_cipher()
   191  req->cryptlen < ZYNQMP_AES_MIN_INPUT_BLK_SIZE) {  in zynqmp_fallback_check()
   194  if ((req->cryptlen % ZYNQMP_AES_WORD_LEN) != 0)  in zynqmp_fallback_check()
   198  req->cryptlen <= ZYNQMP_AES_AUTH_SIZE) {  in zynqmp_fallback_check()
   224  areq->cryptlen, areq->iv);  in zynqmp_handle_aes_req()
|
/Linux-v5.10/include/linux/ |
D | crypto.h |
   511  void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
   512  void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
   526  void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
   527  void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
   533  static inline void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)  in crypto_stats_aead_encrypt() argument
   535  static inline void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)  in crypto_stats_aead_decrypt() argument
   563  static inline void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg …  in crypto_stats_skcipher_encrypt() argument
   565  static inline void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg …  in crypto_stats_skcipher_decrypt() argument
|
/Linux-v5.10/include/crypto/ |
D | aead.h |
    91  unsigned int cryptlen;  member
   499  unsigned int cryptlen, u8 *iv)  in aead_request_set_crypt() argument
   503  req->cryptlen = cryptlen;  in aead_request_set_crypt()
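aead_request_set_crypt() is where callers establish cryptlen. A minimal usage sketch under the convention noted above, assuming the tfm is keyed and the request allocated (my_aead_encrypt and the scatterlist names are illustrative); on encrypt, dst must leave room for the tag:

    #include <crypto/aead.h>

    static int my_aead_encrypt(struct crypto_aead *tfm, struct aead_request *req,
                               struct scatterlist *src, struct scatterlist *dst,
                               unsigned int assoclen, unsigned int plen, u8 *iv)
    {
            aead_request_set_ad(req, assoclen);
            /* cryptlen = plaintext length; dst needs plen + authsize bytes
             * after the AAD. */
            aead_request_set_crypt(req, src, dst, plen, iv);
            return crypto_aead_encrypt(req);
    }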
|