| /Linux-v5.4/arch/arm64/crypto/ |
| D | aes-neonbs-glue.c |
      57   u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32];
      59   } __aligned(AES_BLOCK_SIZE);
      108  while (walk.nbytes >= AES_BLOCK_SIZE) {  in __ecb_crypt()
      109  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;  in __ecb_crypt()
      113  walk.stride / AES_BLOCK_SIZE);  in __ecb_crypt()
      120  walk.nbytes - blocks * AES_BLOCK_SIZE);  in __ecb_crypt()
      167  while (walk.nbytes >= AES_BLOCK_SIZE) {  in cbc_encrypt()
      168  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;  in cbc_encrypt()
      176  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);  in cbc_encrypt()
      190  while (walk.nbytes >= AES_BLOCK_SIZE) {  in cbc_decrypt()
      [all …]
|
| D | aes-glue.c |
      128  u8 dg[AES_BLOCK_SIZE];
      201  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {  in ecb_encrypt()
      206  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);  in ecb_encrypt()
      221  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {  in ecb_decrypt()
      226  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);  in ecb_decrypt()
      239  while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {  in cbc_encrypt_walk()
      244  err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);  in cbc_encrypt_walk()
      268  while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {  in cbc_decrypt_walk()
      273  err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);  in cbc_decrypt_walk()
      294  int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;  in cts_cbc_encrypt()
      [all …]
|
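Every glue file in these directories loops over the skcipher walk the same way: take as many whole AES blocks as the current chunk holds, hand them to the SIMD routine, and return the sub-block remainder through skcipher_walk_done() so the core can feed it back in the next iteration. A minimal sketch of that shape, assuming a hypothetical do_blocks() stand-in for the NEON/CE assembly (the walk API names are as used in v5.4):

```c
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

/* Hypothetical stand-in for the NEON/CE block routine. */
static void do_blocks(u8 *dst, const u8 *src, int blocks);

static int ecb_like_crypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = walk.nbytes / AES_BLOCK_SIZE)) {
		/* Process only the whole blocks in this chunk. */
		do_blocks(walk.dst.virt.addr, walk.src.virt.addr, blocks);
		/* Hand the (< AES_BLOCK_SIZE) remainder back to the walk. */
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
```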
| D | ghash-ce-glue.c |
      316  aes_encrypt(&ctx->aes_key, key, (u8[AES_BLOCK_SIZE]){});  in gcm_setkey()
      404  u8 mac[AES_BLOCK_SIZE];  in gcm_final()
      416  crypto_xor(tag, mac, AES_BLOCK_SIZE);  in gcm_final()
      424  u8 iv[AES_BLOCK_SIZE];  in gcm_encrypt()
      425  u8 ks[2 * AES_BLOCK_SIZE];  in gcm_encrypt()
      426  u8 tag[AES_BLOCK_SIZE];  in gcm_encrypt()
      439  if (likely(crypto_simd_usable() && walk.total >= 2 * AES_BLOCK_SIZE)) {  in gcm_encrypt()
      447  pmull_gcm_encrypt_block(ks + AES_BLOCK_SIZE, iv, NULL, nrounds);  in gcm_encrypt()
      451  int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;  in gcm_encrypt()
      462  walk.nbytes % (2 * AES_BLOCK_SIZE));  in gcm_encrypt()
      [all …]
|
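ghash-ce-glue.c derives the GHASH key by encrypting an all-zero block in gcm_setkey(), and in gcm_final() it folds the GHASH result into the encrypted initial counter block with crypto_xor(). The math behind that last step is simply tag = E_K(J0) XOR GHASH(A, C), truncated to the requested tag length. A standalone sketch of only that step, assuming E_K(J0) and the GHASH value have already been computed elsewhere:

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* GCM tag finalisation: tag = E_K(J0) XOR GHASH, truncated to authsize bytes. */
static void gcm_make_tag(const uint8_t ek_j0[AES_BLOCK_SIZE],
			 const uint8_t ghash[AES_BLOCK_SIZE],
			 uint8_t *tag, size_t authsize)
{
	uint8_t full[AES_BLOCK_SIZE];

	for (size_t i = 0; i < AES_BLOCK_SIZE; i++)
		full[i] = ek_j0[i] ^ ghash[i];
	memcpy(tag, full, authsize);	/* authsize <= AES_BLOCK_SIZE */
}
```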
| D | aes-ce-ccm-glue.c |
      70   __be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];  in ccm_init_mac()
      88   memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);  in ccm_init_mac()
      101  memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);  in ccm_init_mac()
      114  if (*macp > 0 && *macp < AES_BLOCK_SIZE) {  in ccm_update_mac()
      115  int added = min(abytes, AES_BLOCK_SIZE - *macp);  in ccm_update_mac()
      124  while (abytes >= AES_BLOCK_SIZE) {  in ccm_update_mac()
      126  crypto_xor(mac, in, AES_BLOCK_SIZE);  in ccm_update_mac()
      128  in += AES_BLOCK_SIZE;  in ccm_update_mac()
      129  abytes -= AES_BLOCK_SIZE;  in ccm_update_mac()
      183  u8 buf[AES_BLOCK_SIZE];  in ccm_crypt_fallback()
      [all …]
|
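ccm_update_mac() shows the usual CBC-MAC bookkeeping: top up a partially filled block left over from the previous call, absorb whole blocks directly, and stash any tail for later. The sketch below follows the same shape in plain C; the enc callback is a placeholder for a single-block AES encryption, and the state layout is illustrative rather than a copy of the kernel's struct fields:

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Encrypts one 16-byte block in place under the CCM key. */
typedef void (*block_fn)(const void *key, uint8_t blk[AES_BLOCK_SIZE]);

struct cbcmac_state {
	uint8_t mac[AES_BLOCK_SIZE];	/* running CBC-MAC value */
	uint8_t buf[AES_BLOCK_SIZE];	/* buffered partial block */
	size_t buffered;		/* bytes currently in buf */
};

static void cbcmac_update(struct cbcmac_state *st, const void *key,
			  block_fn enc, const uint8_t *in, size_t len)
{
	/* Top up a previously buffered partial block first. */
	if (st->buffered > 0 && st->buffered < AES_BLOCK_SIZE) {
		size_t added = AES_BLOCK_SIZE - st->buffered;

		if (added > len)
			added = len;
		memcpy(st->buf + st->buffered, in, added);
		st->buffered += added;
		in += added;
		len -= added;
		if (st->buffered < AES_BLOCK_SIZE)
			return;		/* still no full block */
		for (int i = 0; i < AES_BLOCK_SIZE; i++)
			st->mac[i] ^= st->buf[i];
		enc(key, st->mac);
		st->buffered = 0;
	}
	/* Absorb whole blocks directly from the input. */
	while (len >= AES_BLOCK_SIZE) {
		for (int i = 0; i < AES_BLOCK_SIZE; i++)
			st->mac[i] ^= in[i];
		enc(key, st->mac);
		in += AES_BLOCK_SIZE;
		len -= AES_BLOCK_SIZE;
	}
	/* Stash any tail for the next call. */
	memcpy(st->buf, in, len);
	st->buffered = len;
}
```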
| /Linux-v5.4/arch/arm/crypto/ |
| D | aes-neonbs-glue.c |
      47   u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
      97   while (walk.nbytes >= AES_BLOCK_SIZE) {  in __ecb_crypt()
      98   unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;  in __ecb_crypt()
      102  walk.stride / AES_BLOCK_SIZE);  in __ecb_crypt()
      109  walk.nbytes - blocks * AES_BLOCK_SIZE);  in __ecb_crypt()
      166  while (walk.nbytes >= AES_BLOCK_SIZE) {  in cbc_decrypt()
      167  unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;  in cbc_decrypt()
      171  walk.stride / AES_BLOCK_SIZE);  in cbc_decrypt()
      179  walk.nbytes - blocks * AES_BLOCK_SIZE);  in cbc_decrypt()
      225  u8 buf[AES_BLOCK_SIZE];  in ctr_encrypt()
      [all …]
|
| D | aes-ce-glue.c |
      54   u8 b[AES_BLOCK_SIZE];
      187  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {  in ecb_encrypt()
      192  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);  in ecb_encrypt()
      207  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {  in ecb_decrypt()
      212  err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);  in ecb_decrypt()
      225  while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {  in cbc_encrypt_walk()
      231  err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);  in cbc_encrypt_walk()
      255  while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {  in cbc_decrypt_walk()
      261  err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);  in cbc_decrypt_walk()
      281  int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;  in cts_cbc_encrypt()
      [all …]
|
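In cts_cbc_encrypt() (both the arm and arm64 glue), DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2 counts how many leading blocks can be run through ordinary CBC, because ciphertext stealing always consumes the final two (possibly partial) blocks. A small worked example of that arithmetic:

```c
#include <stdio.h>

#define AES_BLOCK_SIZE 16
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
	/* The last two (possibly partial) blocks go through the CTS step;
	 * everything before them is plain CBC. */
	unsigned int lens[] = { 32, 33, 48, 100 };

	for (unsigned int i = 0; i < 4; i++) {
		int cbc_blocks = DIV_ROUND_UP(lens[i], AES_BLOCK_SIZE) - 2;

		printf("cryptlen=%3u -> %d full CBC block(s) before the CTS tail\n",
		       lens[i], cbc_blocks > 0 ? cbc_blocks : 0);
	}
	return 0;
}
```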
| /Linux-v5.4/net/mac80211/ |
| D | fils_aead.c |
      28   u8 d[AES_BLOCK_SIZE], tmp[AES_BLOCK_SIZE] = {};  in aes_s2v()
      35   crypto_shash_digest(desc, tmp, AES_BLOCK_SIZE, d);  in aes_s2v()
      41   crypto_xor(d, tmp, AES_BLOCK_SIZE);  in aes_s2v()
      46   if (len[i] >= AES_BLOCK_SIZE) {  in aes_s2v()
      49   crypto_shash_update(desc, addr[i], len[i] - AES_BLOCK_SIZE);  in aes_s2v()
      50   crypto_xor(d, addr[i] + len[i] - AES_BLOCK_SIZE,  in aes_s2v()
      51   AES_BLOCK_SIZE);  in aes_s2v()
      60   crypto_shash_finup(desc, d, AES_BLOCK_SIZE, v);  in aes_s2v()
      71   u8 v[AES_BLOCK_SIZE];  in aes_siv_encrypt()
      106  memcpy(out, v, AES_BLOCK_SIZE);  in aes_siv_encrypt()
      [all …]
|
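aes_s2v() in fils_aead.c is the S2V construction from RFC 5297 (AES-SIV): D starts as the CMAC of a zero block, is doubled in GF(2^128) and XORed with the CMAC of each input vector, and the final vector is folded in with xorend when it is at least one block long (the len[i] >= AES_BLOCK_SIZE branch above) or padded otherwise. The doubling step, which sits between the crypto_shash calls listed above, looks like this in standalone form:

```c
#include <stdint.h>

/* GF(2^128) doubling as used by S2V (RFC 5297): shift the 16-byte value
 * left by one bit and, if the top bit was set, XOR 0x87 into the low byte. */
static void gf128_double(uint8_t d[16])
{
	uint8_t carry = d[0] & 0x80;

	for (int i = 0; i < 15; i++)
		d[i] = (uint8_t)((d[i] << 1) | (d[i + 1] >> 7));
	d[15] <<= 1;
	if (carry)
		d[15] ^= 0x87;
}
```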
| /Linux-v5.4/drivers/crypto/nx/ |
| D | nx-aes-xcbc.c |
      23   u8 state[AES_BLOCK_SIZE];
      25   u8 buffer[AES_BLOCK_SIZE];
      64   u8 keys[2][AES_BLOCK_SIZE];  in nx_xcbc_empty()
      71   memcpy(key, csbcpb->cpb.aes_xcbc.key, AES_BLOCK_SIZE);  in nx_xcbc_empty()
      72   memcpy(csbcpb->cpb.aes_ecb.key, key, AES_BLOCK_SIZE);  in nx_xcbc_empty()
      107  memcpy(csbcpb->cpb.aes_ecb.key, keys[0], AES_BLOCK_SIZE);  in nx_xcbc_empty()
      114  len = AES_BLOCK_SIZE;  in nx_xcbc_empty()
      118  if (len != AES_BLOCK_SIZE)  in nx_xcbc_empty()
      132  memcpy(csbcpb->cpb.aes_xcbc.key, key, AES_BLOCK_SIZE);  in nx_xcbc_empty()
      189  if (total <= AES_BLOCK_SIZE) {  in nx_xcbc_update()
      [all …]
|
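nx_xcbc_empty() needs the keys[2][AES_BLOCK_SIZE] scratch space because AES-XCBC-MAC (RFC 3566) derives its subkeys by ECB-encrypting fixed constant blocks under the user key. A standalone sketch of that derivation; the enc callback is a stand-in for the NX hardware's ECB operation:

```c
#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Encrypts one 16-byte block in place under the XCBC key. */
typedef void (*aes_ecb_block_fn)(const void *key, uint8_t blk[AES_BLOCK_SIZE]);

/* AES-XCBC-MAC subkey derivation (RFC 3566):
 *   K1 = E_K(0x01..01), K2 = E_K(0x02..02), K3 = E_K(0x03..03). */
static void xcbc_derive_keys(const void *key, aes_ecb_block_fn enc,
			     uint8_t k1[AES_BLOCK_SIZE],
			     uint8_t k2[AES_BLOCK_SIZE],
			     uint8_t k3[AES_BLOCK_SIZE])
{
	memset(k1, 0x01, AES_BLOCK_SIZE);
	memset(k2, 0x02, AES_BLOCK_SIZE);
	memset(k3, 0x03, AES_BLOCK_SIZE);
	enc(key, k1);
	enc(key, k2);
	enc(key, k3);
}
```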
| D | nx-aes-gcm.c |
      112  if (nbytes <= AES_BLOCK_SIZE) {  in nx_gca()
      155  AES_BLOCK_SIZE);  in nx_gca()
      164  memcpy(out, csbcpb_aead->cpb.aes_gca.out_pat, AES_BLOCK_SIZE);  in nx_gca()
      193  memcpy(csbcpb->cpb.aes_gcm.iv_or_cnt, desc->info, AES_BLOCK_SIZE);  in gmac()
      225  csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE);  in gmac()
      227  csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE);  in gmac()
      250  char out[AES_BLOCK_SIZE];  in gcm_empty()
      268  len = AES_BLOCK_SIZE;  in gcm_empty()
      274  if (len != AES_BLOCK_SIZE)  in gcm_empty()
      380  memcpy(desc.info, csbcpb->cpb.aes_gcm.out_cnt, AES_BLOCK_SIZE);  in gcm_aes_nx_crypt()
      [all …]
|
| /Linux-v5.4/arch/s390/crypto/ |
| D | paes_s390.c |
      169  while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {  in ecb_paes_crypt()
      171  n = nbytes & ~(AES_BLOCK_SIZE - 1);  in ecb_paes_crypt()
      209  .cra_blocksize = AES_BLOCK_SIZE,
      286  u8 iv[AES_BLOCK_SIZE];  in cbc_paes_crypt()
      291  memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);  in cbc_paes_crypt()
      293  while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {  in cbc_paes_crypt()
      295  n = nbytes & ~(AES_BLOCK_SIZE - 1);  in cbc_paes_crypt()
      306  memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);  in cbc_paes_crypt()
      335  .cra_blocksize = AES_BLOCK_SIZE,
      346  .ivsize = AES_BLOCK_SIZE,
      [all …]
|
| D | aes_s390.c |
      65   u8 buf[AES_BLOCK_SIZE];
      119  cpacf_km(sctx->fc, &sctx->key, out, in, AES_BLOCK_SIZE);  in crypto_aes_encrypt()
      131  &sctx->key, out, in, AES_BLOCK_SIZE);  in crypto_aes_decrypt()
      165  .cra_blocksize = AES_BLOCK_SIZE,
      266  while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {  in ecb_aes_crypt()
      268  n = nbytes & ~(AES_BLOCK_SIZE - 1);  in ecb_aes_crypt()
      335  .cra_blocksize = AES_BLOCK_SIZE,
      380  u8 iv[AES_BLOCK_SIZE];  in cbc_aes_crypt()
      385  memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);  in cbc_aes_crypt()
      387  while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {  in cbc_aes_crypt()
      [all …]
|
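Both s390 drivers trim each walk chunk to a whole number of blocks with nbytes & ~(AES_BLOCK_SIZE - 1), which works because AES_BLOCK_SIZE is a power of two. A tiny worked example of the mask arithmetic:

```c
#include <stdio.h>

#define AES_BLOCK_SIZE 16	/* power of two, so masking rounds down */

int main(void)
{
	unsigned int nbytes = 70;
	unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);	/* 64: whole blocks */
	unsigned int tail = nbytes & (AES_BLOCK_SIZE - 1);	/* 6: leftover bytes */

	printf("%u bytes -> process %u now, %u left for the next pass\n",
	       nbytes, n, tail);
	return 0;
}
```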
| /Linux-v5.4/drivers/crypto/ccp/ |
| D | ccp-crypto.h |
      107  u8 k1[AES_BLOCK_SIZE];
      108  u8 k2[AES_BLOCK_SIZE];
      113  u8 iv[AES_BLOCK_SIZE];
      116  u8 tag[AES_BLOCK_SIZE];
      120  u8 rfc3686_iv[AES_BLOCK_SIZE];
      138  u8 iv[AES_BLOCK_SIZE];
      142  u8 buf[AES_BLOCK_SIZE];
      146  u8 pad[AES_BLOCK_SIZE];
      154  u8 iv[AES_BLOCK_SIZE];
      157  u8 buf[AES_BLOCK_SIZE];
      [all …]
|
| D | ccp-crypto-aes.c |
      32   memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);  in ccp_aes_complete()
      80   (req->nbytes & (AES_BLOCK_SIZE - 1)))  in ccp_aes_crypt()
      87   memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);  in ccp_aes_crypt()
      89   iv_len = AES_BLOCK_SIZE;  in ccp_aes_crypt()
      219  .cra_blocksize = AES_BLOCK_SIZE,
      272  .blocksize = AES_BLOCK_SIZE,
      281  .blocksize = AES_BLOCK_SIZE,
      282  .ivsize = AES_BLOCK_SIZE,
      291  .ivsize = AES_BLOCK_SIZE,
      300  .ivsize = AES_BLOCK_SIZE,
      [all …]
|
| /Linux-v5.4/drivers/crypto/vmx/ |
| D | aes_ctr.c |
      73   u8 keystream[AES_BLOCK_SIZE];  in p8_aes_ctr_final()
      87   crypto_inc(ctrblk, AES_BLOCK_SIZE);  in p8_aes_ctr_final()
      107  while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {  in p8_aes_ctr_crypt()
      113  nbytes / AES_BLOCK_SIZE,  in p8_aes_ctr_crypt()
      120  crypto_inc(walk.iv, AES_BLOCK_SIZE);  in p8_aes_ctr_crypt()
      121  } while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE);  in p8_aes_ctr_crypt()
      147  .ivsize = AES_BLOCK_SIZE,
      148  .chunksize = AES_BLOCK_SIZE,
|
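p8_aes_ctr_final() handles a tail shorter than one block: encrypt the counter once to get a keystream block, XOR only the bytes that remain, then bump the counter (crypto_inc() in the kernel). A standalone sketch of that step; the enc callback is a stand-in for a single-block AES encryption:

```c
#include <stddef.h>
#include <stdint.h>

#define AES_BLOCK_SIZE 16

typedef void (*aes_block_fn)(const void *key, uint8_t out[AES_BLOCK_SIZE],
			     const uint8_t in[AES_BLOCK_SIZE]);

/* Big-endian increment of the counter block, like crypto_inc(). */
static void ctr_inc(uint8_t ctr[AES_BLOCK_SIZE])
{
	for (int i = AES_BLOCK_SIZE - 1; i >= 0; i--)
		if (++ctr[i])
			break;
}

/* Final (partial) CTR block: keystream = E_K(ctr); XOR only nbytes bytes. */
static void ctr_final(const void *key, aes_block_fn enc,
		      uint8_t ctr[AES_BLOCK_SIZE],
		      uint8_t *dst, const uint8_t *src, size_t nbytes)
{
	uint8_t keystream[AES_BLOCK_SIZE];

	enc(key, keystream, ctr);
	for (size_t i = 0; i < nbytes; i++)
		dst[i] = src[i] ^ keystream[i];
	ctr_inc(ctr);
}
```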
| D | aes_xts.c |
      84   u8 tweak[AES_BLOCK_SIZE];  in p8_aes_xts_crypt()
      117  round_down(nbytes, AES_BLOCK_SIZE),  in p8_aes_xts_crypt()
      122  round_down(nbytes, AES_BLOCK_SIZE),  in p8_aes_xts_crypt()
      128  ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);  in p8_aes_xts_crypt()
      149  .base.cra_blocksize = AES_BLOCK_SIZE,
      158  .ivsize = AES_BLOCK_SIZE,
|
| D | aes_cbc.c |
      95   round_down(nbytes, AES_BLOCK_SIZE),  in p8_aes_cbc_crypt()
      102  ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);  in p8_aes_cbc_crypt()
      123  .base.cra_blocksize = AES_BLOCK_SIZE,
      132  .ivsize = AES_BLOCK_SIZE,
|
| /Linux-v5.4/drivers/crypto/ccree/ |
| D | cc_cipher.c |
      120   if (size >= AES_BLOCK_SIZE)  in validate_data_size()
      130   if (IS_ALIGNED(size, AES_BLOCK_SIZE))  in validate_data_size()
      944   .ivsize = AES_BLOCK_SIZE,
      962   .ivsize = AES_BLOCK_SIZE,
      981   .ivsize = AES_BLOCK_SIZE,
      993   .blocksize = AES_BLOCK_SIZE,
      1000  .ivsize = AES_BLOCK_SIZE,
      1011  .blocksize = AES_BLOCK_SIZE,
      1018  .ivsize = AES_BLOCK_SIZE,
      1030  .blocksize = AES_BLOCK_SIZE,
      [all …]
|
| D | cc_aead.h |
      17   #define CCM_CONFIG_BUF_SIZE (AES_BLOCK_SIZE * 3)
      52   u8 ctr_iv[AES_BLOCK_SIZE] ____cacheline_aligned;
      55   u8 gcm_iv_inc1[AES_BLOCK_SIZE] ____cacheline_aligned;
      56   u8 gcm_iv_inc2[AES_BLOCK_SIZE] ____cacheline_aligned;
      57   u8 hkey[AES_BLOCK_SIZE] ____cacheline_aligned;
|
| /Linux-v5.4/drivers/crypto/ |
| D | padlock-aes.c |
      34   #define ecb_fetch_bytes (ecb_fetch_blocks * AES_BLOCK_SIZE)
      38   #define cbc_fetch_bytes (cbc_fetch_blocks * AES_BLOCK_SIZE)
      215  u8 buf[AES_BLOCK_SIZE * (MAX_ECB_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1];  in ecb_crypt_copy()
      218  memcpy(tmp, in, count * AES_BLOCK_SIZE);  in ecb_crypt_copy()
      229  u8 buf[AES_BLOCK_SIZE * (MAX_CBC_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1];  in cbc_crypt_copy()
      232  memcpy(tmp, in, count * AES_BLOCK_SIZE);  in cbc_crypt_copy()
      326  .cra_blocksize = AES_BLOCK_SIZE,
      357  nbytes / AES_BLOCK_SIZE);  in ecb_aes_encrypt()
      358  nbytes &= AES_BLOCK_SIZE - 1;  in ecb_aes_encrypt()
      383  nbytes / AES_BLOCK_SIZE);  in ecb_aes_decrypt()
      [all …]
|
| D | atmel-aes.c |
      120  u32 iv[AES_BLOCK_SIZE / sizeof(u32)];
      132  u32 j0[AES_BLOCK_SIZE / sizeof(u32)];
      133  u32 tag[AES_BLOCK_SIZE / sizeof(u32)];
      134  u32 ghash[AES_BLOCK_SIZE / sizeof(u32)];
      157  u32 lastc[AES_BLOCK_SIZE / sizeof(u32)];
      393  atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));  in atmel_aes_read_block()
      399  atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));  in atmel_aes_write_block()
      581  dd->datalen -= AES_BLOCK_SIZE;  in atmel_aes_cpu_transfer()
      583  if (dd->datalen < AES_BLOCK_SIZE)  in atmel_aes_cpu_transfer()
      612  size_t padlen = atmel_aes_padlen(len, AES_BLOCK_SIZE);  in atmel_aes_cpu_start()
      [all …]
|
| D | omap-aes-gcm.c |
      45   alen = ALIGN(dd->assoc_len, AES_BLOCK_SIZE);  in omap_aes_gcm_done_task()
      46   clen = ALIGN(dd->total, AES_BLOCK_SIZE);  in omap_aes_gcm_done_task()
      106  alen = ALIGN(assoclen, AES_BLOCK_SIZE);  in omap_aes_gcm_copy_buffers()
      107  clen = ALIGN(cryptlen, AES_BLOCK_SIZE);  in omap_aes_gcm_copy_buffers()
      117  AES_BLOCK_SIZE, dd->in_sgl,  in omap_aes_gcm_copy_buffers()
      129  AES_BLOCK_SIZE, &dd->in_sgl[nsg],  in omap_aes_gcm_copy_buffers()
      152  AES_BLOCK_SIZE, &dd->out_sgl,  in omap_aes_gcm_copy_buffers()
      191  sg_init_one(&iv_sg, iv, AES_BLOCK_SIZE);  in do_encrypt_iv()
      192  sg_init_one(&tag_sg, tag, AES_BLOCK_SIZE);  in do_encrypt_iv()
      196  skcipher_request_set_crypt(sk_req, &iv_sg, &tag_sg, AES_BLOCK_SIZE,  in do_encrypt_iv()
|
| /Linux-v5.4/arch/powerpc/crypto/ |
| D | aes-spe-glue.c |
      195  nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);  in ppc_ecb_encrypt()
      223  nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);  in ppc_ecb_decrypt()
      251  nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);  in ppc_cbc_encrypt()
      279  nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);  in ppc_cbc_decrypt()
      303  err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);  in ppc_ctr_crypt()
      308  nbytes : pbytes & ~(AES_BLOCK_SIZE - 1);  in ppc_ctr_crypt()
      339  nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);  in ppc_xts_encrypt()
      370  nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);  in ppc_xts_decrypt()
      398  .cra_blocksize = AES_BLOCK_SIZE,
      416  .cra_blocksize = AES_BLOCK_SIZE,
      [all …]
|
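The SPE glue caps how many bytes each call into the assembly may process (MAX_BYTES), and the ternary shown above computes what to leave for a later pass: everything beyond the cap, or otherwise just the partial tail block. A worked example of that arithmetic; the MAX_BYTES value of 768 is assumed here for illustration and may not match the source exactly:

```c
#include <stdio.h>

#define AES_BLOCK_SIZE 16
#define MAX_BYTES 768	/* assumed per-call limit, for illustration only */

int main(void)
{
	unsigned int lens[] = { 100, 768, 2000 };

	for (int i = 0; i < 3; i++) {
		unsigned int nbytes = lens[i];
		/* Bytes deferred to a later pass: either the overflow past
		 * MAX_BYTES, or just the sub-block tail. */
		unsigned int ubytes = (nbytes > MAX_BYTES) ?
				nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);

		printf("nbytes=%4u -> process %4u, defer %4u\n",
		       nbytes, nbytes - ubytes, ubytes);
	}
	return 0;
}
```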
| /Linux-v5.4/arch/sparc/crypto/ |
| D | aes_glue.c |
      214  #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))
      238  nbytes &= AES_BLOCK_SIZE - 1;  in ecb_encrypt()
      268  nbytes &= AES_BLOCK_SIZE - 1;  in ecb_decrypt()
      298  nbytes &= AES_BLOCK_SIZE - 1;  in cbc_encrypt()
      329  nbytes &= AES_BLOCK_SIZE - 1;  in cbc_decrypt()
      341  u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];  in ctr_crypt_final()
      347  keystream, AES_BLOCK_SIZE);  in ctr_crypt_final()
      349  crypto_inc(ctrblk, AES_BLOCK_SIZE);  in ctr_crypt_final()
      361  err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);  in ctr_crypt()
      365  while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {  in ctr_crypt()
      [all …]
|
| /Linux-v5.4/drivers/crypto/mediatek/ |
| D | mtk-aes.c |
      19   & ~(AES_BLOCK_SIZE - 1))
      21   AES_BLOCK_SIZE * 2)
      128  u32 iv[AES_BLOCK_SIZE / sizeof(u32)];
      187  len &= AES_BLOCK_SIZE - 1;  in mtk_aes_padlen()
      188  return len ? AES_BLOCK_SIZE - len : 0;  in mtk_aes_padlen()
      196  if (!IS_ALIGNED(len, AES_BLOCK_SIZE))  in mtk_aes_check_aligned()
      204  if (!IS_ALIGNED(len, AES_BLOCK_SIZE))  in mtk_aes_check_aligned()
      213  if (!IS_ALIGNED(sg->length, AES_BLOCK_SIZE))  in mtk_aes_check_aligned()
      325  res->hdr += AES_BLOCK_SIZE;  in mtk_aes_xmit()
      454  AES_BLOCK_SIZE);  in mtk_aes_info_init()
      [all …]
|
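mtk_aes_padlen() computes how many bytes of padding are needed to reach the next block boundary. The same logic as a standalone program with a few sample lengths:

```c
#include <stdio.h>

#define AES_BLOCK_SIZE 16

/* Padding needed to round len up to the next AES block boundary,
 * mirroring the mtk_aes_padlen() lines listed above. */
static unsigned int aes_padlen(unsigned int len)
{
	len &= AES_BLOCK_SIZE - 1;
	return len ? AES_BLOCK_SIZE - len : 0;
}

int main(void)
{
	unsigned int lens[] = { 15, 16, 17, 100 };

	for (int i = 0; i < 4; i++)
		printf("len=%3u -> pad %2u byte(s)\n", lens[i], aes_padlen(lens[i]));
	return 0;
}
```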
| /Linux-v5.4/drivers/crypto/cavium/nitrox/ |
| D | nitrox_skcipher.c |
      333  .cra_blocksize = AES_BLOCK_SIZE,
      340  .ivsize = AES_BLOCK_SIZE,
      352  .cra_blocksize = AES_BLOCK_SIZE,
      359  .ivsize = AES_BLOCK_SIZE,
      371  .cra_blocksize = AES_BLOCK_SIZE,
      378  .ivsize = AES_BLOCK_SIZE,
      390  .cra_blocksize = AES_BLOCK_SIZE,
      397  .ivsize = AES_BLOCK_SIZE,
      428  .cra_blocksize = AES_BLOCK_SIZE,
      436  .ivsize = AES_BLOCK_SIZE,
|