/Linux-v5.10/lib/crypto/
D | aes.c
    199  ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));  in aes_expandkey()
    202  u32 *rki = ctx->key_enc + (i * kwords);  in aes_expandkey()
    231  ctx->key_dec[0] = ctx->key_enc[key_len + 24];  in aes_expandkey()
    232  ctx->key_dec[1] = ctx->key_enc[key_len + 25];  in aes_expandkey()
    233  ctx->key_dec[2] = ctx->key_enc[key_len + 26];  in aes_expandkey()
    234  ctx->key_dec[3] = ctx->key_enc[key_len + 27];  in aes_expandkey()
    237  ctx->key_dec[i] = inv_mix_columns(ctx->key_enc[j]);  in aes_expandkey()
    238  ctx->key_dec[i + 1] = inv_mix_columns(ctx->key_enc[j + 1]);  in aes_expandkey()
    239  ctx->key_dec[i + 2] = inv_mix_columns(ctx->key_enc[j + 2]);  in aes_expandkey()
    240  ctx->key_dec[i + 3] = inv_mix_columns(ctx->key_enc[j + 3]);  in aes_expandkey()
    [all …]
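The aes_expandkey() hits above are the generic library building the decryption schedule out of the encryption one: key_dec begins with the last encryption round key (the index key_len + 24 equals 4 * rounds, because rounds = key_len / 4 + 6 with key_len in bytes), and the inner round keys are copied in reverse order through inv_mix_columns(). The standalone C sketch below reproduces that pattern outside the kernel; the names gmul(), inv_mix_columns_word() and derive_decrypt_keys() are invented for the example and only approximate what lib/crypto/aes.c actually does.

#include <stdint.h>

/* multiply by x (0x02) in GF(2^8) with the AES reduction polynomial 0x11b */
static uint8_t xtime(uint8_t b)
{
	return (uint8_t)((b << 1) ^ ((b & 0x80) ? 0x1b : 0x00));
}

/* small GF(2^8) multiply, enough for the 0x09/0x0b/0x0d/0x0e constants */
static uint8_t gmul(uint8_t a, uint8_t b)
{
	uint8_t p = 0;

	while (b) {
		if (b & 1)
			p ^= a;
		a = xtime(a);
		b >>= 1;
	}
	return p;
}

/* InvMixColumns applied to one round-key word (byte 0 in the low byte,
 * matching the get_unaligned_le32() loads shown above) */
static uint32_t inv_mix_columns_word(uint32_t w)
{
	uint8_t s0 = w, s1 = w >> 8, s2 = w >> 16, s3 = w >> 24;
	uint8_t d0 = gmul(s0, 0x0e) ^ gmul(s1, 0x0b) ^ gmul(s2, 0x0d) ^ gmul(s3, 0x09);
	uint8_t d1 = gmul(s0, 0x09) ^ gmul(s1, 0x0e) ^ gmul(s2, 0x0b) ^ gmul(s3, 0x0d);
	uint8_t d2 = gmul(s0, 0x0d) ^ gmul(s1, 0x09) ^ gmul(s2, 0x0e) ^ gmul(s3, 0x0b);
	uint8_t d3 = gmul(s0, 0x0b) ^ gmul(s1, 0x0d) ^ gmul(s2, 0x09) ^ gmul(s3, 0x0e);

	return (uint32_t)d0 | ((uint32_t)d1 << 8) |
	       ((uint32_t)d2 << 16) | ((uint32_t)d3 << 24);
}

/* key_enc holds (rounds + 1) * 4 expanded words; key_dec receives as many */
static void derive_decrypt_keys(uint32_t *key_dec, const uint32_t *key_enc,
				int rounds)
{
	int i, j;

	/* first decryption round key = last encryption round key */
	for (i = 0; i < 4; i++)
		key_dec[i] = key_enc[4 * rounds + i];

	/* inner round keys, reversed and run through InvMixColumns */
	for (i = 4, j = 4 * (rounds - 1); j > 0; i += 4, j -= 4) {
		key_dec[i]     = inv_mix_columns_word(key_enc[j]);
		key_dec[i + 1] = inv_mix_columns_word(key_enc[j + 1]);
		key_dec[i + 2] = inv_mix_columns_word(key_enc[j + 2]);
		key_dec[i + 3] = inv_mix_columns_word(key_enc[j + 3]);
	}

	/* last decryption round key = first four words of the original schedule */
	for (i = 0; i < 4; i++)
		key_dec[4 * rounds + i] = key_enc[i];
}

Reversing the schedule this way is the "equivalent inverse cipher" construction from FIPS-197: decryption can reuse the same per-round structure as encryption, at the cost of one InvMixColumns pass per inner round key at key-setup time.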
/Linux-v5.10/arch/powerpc/crypto/
D | aes-spe-glue.c
     42  u32 key_enc[AES_MAX_KEYLENGTH_U32];  member
     48  u32 key_enc[AES_MAX_KEYLENGTH_U32];  member
     54  extern void ppc_encrypt_aes(u8 *out, const u8 *in, u32 *key_enc, u32 rounds);
     56  extern void ppc_encrypt_ecb(u8 *out, const u8 *in, u32 *key_enc, u32 rounds,
     60  extern void ppc_encrypt_cbc(u8 *out, const u8 *in, u32 *key_enc, u32 rounds,
     64  extern void ppc_crypt_ctr (u8 *out, const u8 *in, u32 *key_enc, u32 rounds,
     66  extern void ppc_encrypt_xts(u8 *out, const u8 *in, u32 *key_enc, u32 rounds,
     71  extern void ppc_expand_key_128(u32 *key_enc, const u8 *key);
     72  extern void ppc_expand_key_192(u32 *key_enc, const u8 *key);
     73  extern void ppc_expand_key_256(u32 *key_enc, const u8 *key);
    [all …]
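The declarations above are the entry points into the SPE assembly, and the glue code presumably selects an expansion routine by key length. Below is a standalone sketch of that dispatch, assuming the usual 10/12/14 round counts; the context struct, the helper names and the expand_*() stubs (which only copy the raw key so the example compiles, they do not expand anything) are invented here and merely stand in for ppc_expand_key_128/192/256.

#include <stdint.h>
#include <string.h>

#define AES_KEYSIZE_128		16
#define AES_KEYSIZE_192		24
#define AES_KEYSIZE_256		32
#define AES_MAX_KEYLENGTH_U32	60

struct spe_aes_ctx_sketch {
	uint32_t key_enc[AES_MAX_KEYLENGTH_U32];	/* expanded schedule */
	uint32_t rounds;				/* 10, 12 or 14 */
};

/* placeholders for the ppc_expand_key_*() assembly; they only copy the
 * raw key so that the sketch is self-contained */
static void expand_128(uint32_t *key_enc, const uint8_t *key) { memcpy(key_enc, key, 16); }
static void expand_192(uint32_t *key_enc, const uint8_t *key) { memcpy(key_enc, key, 24); }
static void expand_256(uint32_t *key_enc, const uint8_t *key) { memcpy(key_enc, key, 32); }

/* returns 0 on success, -1 for an unsupported key length */
static int spe_aes_setkey_sketch(struct spe_aes_ctx_sketch *ctx,
				 const uint8_t *in_key, unsigned int key_len)
{
	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->rounds = 10;
		expand_128(ctx->key_enc, in_key);
		break;
	case AES_KEYSIZE_192:
		ctx->rounds = 12;
		expand_192(ctx->key_enc, in_key);
		break;
	case AES_KEYSIZE_256:
		ctx->rounds = 14;
		expand_256(ctx->key_enc, in_key);
		break;
	default:
		return -1;
	}
	return 0;
}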
/Linux-v5.10/arch/arm64/crypto/

D | aes-ce-glue.c
     56  __aes_ce_encrypt(ctx->key_enc, dst, src, num_rounds(ctx));  in aes_cipher_encrypt()
     85  struct aes_block *key_enc, *key_dec;  in ce_aes_expandkey()  local
     95  ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));  in ce_aes_expandkey()
     99  u32 *rki = ctx->key_enc + (i * kwords);  in ce_aes_expandkey()
    128  key_enc = (struct aes_block *)ctx->key_enc;  in ce_aes_expandkey()
    132  key_dec[0] = key_enc[j];  in ce_aes_expandkey()
    134  __aes_ce_invert(key_dec + i, key_enc + j);  in ce_aes_expandkey()
    135  key_dec[i] = key_enc[0];  in ce_aes_expandkey()
D | aes-glue.c
    186  ctx->key_enc, rounds, blocks);  in ecb_encrypt()
    224  ctx->key_enc, rounds, blocks, walk->iv);  in cbc_encrypt_walk()
    322  ctx->key_enc, rounds, walk.nbytes, walk.iv);  in cts_cbc_encrypt()
    415  ctx->key1.key_enc, rounds, blocks,  in essiv_cbc_encrypt()
    416  req->iv, ctx->key2.key_enc);  in essiv_cbc_encrypt()
    438  req->iv, ctx->key2.key_enc);  in essiv_cbc_decrypt()
    458  ctx->key_enc, rounds, blocks, walk.iv);  in ctr_encrypt()
    474  aes_ctr_encrypt(tail, NULL, ctx->key_enc, rounds,  in ctr_encrypt()
    550  ctx->key1.key_enc, rounds, nbytes,  in xts_encrypt()
    551  ctx->key2.key_enc, walk.iv, first);  in xts_encrypt()
    [all …]
D | aes-ce-ccm-glue.c
    104  ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,  in ccm_update_mac()
    254  walk.nbytes - tail, ctx->key_enc,  in ccm_encrypt()
    262  ce_aes_ccm_final(mac, buf, ctx->key_enc,  in ccm_encrypt()
    312  walk.nbytes - tail, ctx->key_enc,  in ccm_decrypt()
    320  ce_aes_ccm_final(mac, buf, ctx->key_enc,  in ccm_decrypt()
D | aes-neonbs-glue.c
     91  aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);  in aesbs_setkey()
    149  memcpy(ctx->enc, rk.key_enc, sizeof(ctx->enc));  in aesbs_cbc_setkey()
    152  aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);  in aesbs_cbc_setkey()
    223  aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);  in aesbs_ctr_setkey_sync()
    290  memcpy(ctx->twkey, rk.key_enc, sizeof(ctx->twkey));  in aesbs_xts_setkey()
    413  neon_aes_xts_encrypt(out, in, ctx->cts.key_enc, ctx->key.rounds,  in __xts_crypt()
D | aes-cipher-glue.c
     20  __aes_arm64_encrypt(ctx->key_enc, out, in, rounds);  in aes_arm64_encrypt()
D | ghash-ce-glue.c
    383  dg, iv, ctx->aes_key.key_enc, nrounds,  in gcm_encrypt()
    497  dg, iv, ctx->aes_key.key_enc, nrounds,  in gcm_decrypt()
/Linux-v5.10/arch/arm/crypto/

D | aes-ce-glue.c
     80  struct aes_block *key_enc, *key_dec;  in ce_aes_expandkey()  local
     90  ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));  in ce_aes_expandkey()
     94  u32 *rki = ctx->key_enc + (i * kwords);  in ce_aes_expandkey()
    124  key_enc = (struct aes_block *)ctx->key_enc;  in ce_aes_expandkey()
    128  key_dec[0] = key_enc[j];  in ce_aes_expandkey()
    130  ce_aes_invert(key_dec + i, key_enc + j);  in ce_aes_expandkey()
    131  key_dec[i] = key_enc[0];  in ce_aes_expandkey()
    180  ctx->key_enc, num_rounds(ctx), blocks);  in ecb_encrypt()
    218  ctx->key_enc, num_rounds(ctx), blocks,  in cbc_encrypt_walk()
    318  ctx->key_enc, num_rounds(ctx), walk.nbytes,  in cts_cbc_encrypt()
    [all …]
D | aes-cipher-glue.c
     21  __aes_arm_encrypt(ctx->key_enc, rounds, in, out);  in aes_arm_encrypt()
D | aes-neonbs-glue.c
     79  aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);  in aesbs_setkey()
    138  aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);  in aesbs_cbc_setkey()
    225  aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);  in aesbs_ctr_setkey_sync()
/Linux-v5.10/crypto/
D | aes_generic.c
   1180  const u32 *kp = ctx->key_enc + 4;  in crypto_aes_encrypt()
   1183  b0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in);  in crypto_aes_encrypt()
   1184  b0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4);  in crypto_aes_encrypt()
   1185  b0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8);  in crypto_aes_encrypt()
   1186  b0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12);  in crypto_aes_encrypt()
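These lines are the initial AddRoundKey ("whitening") step of the generic C implementation: the 16-byte block is loaded as four little-endian words, XORed with the first four words of key_enc, and the round loop then walks kp = key_enc + 4. A minimal standalone sketch of just that step, with load_le32() standing in for the kernel's get_unaligned_le32():

#include <stdint.h>

/* unaligned little-endian 32-bit load */
static uint32_t load_le32(const uint8_t *p)
{
	return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
	       ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
}

/* XOR the plaintext block with round key 0 before the first round */
static void add_round_key_0(uint32_t b0[4], const uint8_t in[16],
			    const uint32_t *key_enc)
{
	b0[0] = key_enc[0] ^ load_le32(in);
	b0[1] = key_enc[1] ^ load_le32(in + 4);
	b0[2] = key_enc[2] ^ load_le32(in + 8);
	b0[3] = key_enc[3] ^ load_le32(in + 12);
}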
/Linux-v5.10/include/crypto/
D | aes.h
     26  u32 key_enc[AES_MAX_KEYLENGTH_U32];  member
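The member hit is the encryption half of struct crypto_aes_ctx, the context type most callers in this listing embed. From memory, the v5.10 definition looks roughly like the sketch below (check include/crypto/aes.h for the authoritative version); AES_MAX_KEYLENGTH is 15 * 16 = 240 bytes, room for the 15 round keys of AES-256, so each schedule is 60 u32 words.

struct crypto_aes_ctx {
	u32 key_enc[AES_MAX_KEYLENGTH_U32];	/* expanded encryption round keys */
	u32 key_dec[AES_MAX_KEYLENGTH_U32];	/* expanded decryption round keys */
	u32 key_length;				/* original key size in bytes */
};

Callers either hand key_enc/key_dec straight to the per-round code (lib/crypto, the arm/arm64 glue above) or, as in several drivers below, repackage the words into whatever layout their hardware expects.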
/Linux-v5.10/drivers/crypto/inside-secure/
D | safexcel_cipher.c
    386  if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {  in safexcel_skcipher_aes_setkey()
    394  ctx->key[i] = cpu_to_le32(aes.key_enc[i]);  in safexcel_skcipher_aes_setkey()
   1431  if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {  in safexcel_skcipher_aesctr_setkey()
   1439  ctx->key[i] = cpu_to_le32(aes.key_enc[i]);  in safexcel_skcipher_aesctr_setkey()
   2469  if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {  in safexcel_skcipher_aesxts_setkey()
   2477  ctx->key[i] = cpu_to_le32(aes.key_enc[i]);  in safexcel_skcipher_aesxts_setkey()
   2487  aes.key_enc[i]) {  in safexcel_skcipher_aesxts_setkey()
   2496  cpu_to_le32(aes.key_enc[i]);  in safexcel_skcipher_aesxts_setkey()
   2578  if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {  in safexcel_aead_gcm_setkey()
   2586  ctx->key[i] = cpu_to_le32(aes.key_enc[i]);  in safexcel_aead_gcm_setkey()
    [all …]
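The setkey paths above all share one shape: expand the key in software into aes.key_enc, compare it word by word against the little-endian copy kept for the engine (presumably so an already-programmed hardware context can be refreshed when the key changes), then store the new words with cpu_to_le32(). The standalone sketch below mirrors only that compare-then-copy shape; the struct, the needs_inval flag, the word count and swap_if_be() are invented stand-ins, not the driver's types or the kernel's cpu_to_le32()/le32_to_cpu().

#include <stdbool.h>
#include <stdint.h>

/* stand-in for cpu_to_le32()/le32_to_cpu(): byte swap on big-endian hosts,
 * no-op on little-endian ones (the conversion is its own inverse) */
static uint32_t swap_if_be(uint32_t v)
{
#if defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
	return __builtin_bswap32(v);
#else
	return v;
#endif
}

struct hw_aes_key_sketch {
	uint32_t key[8];	/* key words in the little-endian layout the engine reads */
	bool needs_inval;	/* set when a previously programmed key has changed */
};

static void hw_aes_load_key_sketch(struct hw_aes_key_sketch *hw,
				   const uint32_t *key_enc, unsigned int words)
{
	unsigned int i;

	/* detect a key change against the cached little-endian copy */
	for (i = 0; i < words; i++)
		if (swap_if_be(hw->key[i]) != key_enc[i])
			hw->needs_inval = true;

	/* store the freshly expanded words in the engine's layout */
	for (i = 0; i < words; i++)
		hw->key[i] = swap_if_be(key_enc[i]);
}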
D | safexcel_hash.c
   2025  ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);  in safexcel_cbcmac_setkey()
   2190  ctx->base.ipad.word[i + 8] = swab(aes.key_enc[i]);  in safexcel_cmac_setkey()
/Linux-v5.10/drivers/crypto/

D | sa2ul.c
    452  ctx.key_enc[52] = ctx.key_enc[51] ^ ctx.key_enc[46];  in sa_aes_inv_key()
    453  ctx.key_enc[53] = ctx.key_enc[52] ^ ctx.key_enc[47];  in sa_aes_inv_key()
    472  memcpy(inv_key, &ctx.key_enc[key_pos], key_sz);  in sa_aes_inv_key()
D | padlock-aes.c
    150  memcpy(ctx->E, gen_aes.key_enc, AES_MAX_KEYLENGTH);  in aes_set_key()
/Linux-v5.10/drivers/crypto/marvell/cesa/

D | cipher.c
    267  ctx->aes.key_dec[4 + i] = ctx->aes.key_enc[offset + i];  in mv_cesa_aes_setkey()
    700  key = ctx->aes.key_enc;  in mv_cesa_aes_op()
/Linux-v5.10/include/linux/mlx5/

D | mlx5_ifc_fpga.h
    543  u8 key_enc[32];  member
/Linux-v5.10/drivers/net/ethernet/mellanox/mlx5/core/fpga/

D | ipsec.c
    468  memcpy(&hw_sa->ipsec_sa_v1.key_enc, aes_gcm->aes_key,  in mlx5_fpga_ipsec_build_hw_xfrm()
    472  memcpy(&hw_sa->ipsec_sa_v1.key_enc[16],  in mlx5_fpga_ipsec_build_hw_xfrm()