/Linux-v4.19/fs/ext4/ |
D | hash.c |
     46  #define K2 013240474631UL                    macro
     67  ROUND(G, a, b, c, d, in[1] + K2, 3);         in half_md4_transform()
     68  ROUND(G, d, a, b, c, in[3] + K2, 5);         in half_md4_transform()
     69  ROUND(G, c, d, a, b, in[5] + K2, 9);         in half_md4_transform()
     70  ROUND(G, b, c, d, a, in[7] + K2, 13);        in half_md4_transform()
     71  ROUND(G, a, b, c, d, in[0] + K2, 3);         in half_md4_transform()
     72  ROUND(G, d, a, b, c, in[2] + K2, 5);         in half_md4_transform()
     73  ROUND(G, c, d, a, b, in[4] + K2, 9);         in half_md4_transform()
     74  ROUND(G, b, c, d, a, in[6] + K2, 13);        in half_md4_transform()
     95  #undef K2
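ext4's directory-index hashing uses this "half MD4" transform; the octal constant 013240474631UL is 0x5A827999, the MD4 round-2 additive constant. For orientation, here is a minimal scalar sketch of one such G-round step. The G() and ROUND() definitions below are the conventional MD4 forms and are assumptions, not copies of hash.c:

  #include <stdint.h>

  #define K2 0x5A827999U          /* same value as the octal 013240474631UL above */

  static inline uint32_t rol32(uint32_t w, unsigned int s)
  {
          return (w << s) | (w >> (32 - s));
  }

  /* MD4 round-2 "majority" function */
  static inline uint32_t G(uint32_t x, uint32_t y, uint32_t z)
  {
          return (x & y) | (x & z) | (y & z);
  }

  /* Conventional MD4-style step: a += f(b,c,d) + x; a = rol(a, s).
   * This is an assumption about how hash.c's ROUND is written, not a copy. */
  #define ROUND(f, a, b, c, d, x, s) \
          ((a) += f((b), (c), (d)) + (x), (a) = rol32((a), (s)))

  /* One of the eight G-round steps listed above, e.g. line 67: */
  static void half_md4_g_step_example(uint32_t buf[4], const uint32_t in[8])
  {
          ROUND(G, buf[0], buf[1], buf[2], buf[3], in[1] + K2, 3);
  }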
|
/Linux-v4.19/crypto/ |
D | anubis.c |
    493  u32 K0, K1, K2, K3;                           in anubis_setkey() local
    499  K2 = T4[(kappa[N - 1] >> 8) & 0xff];          in anubis_setkey()
    512  K2 = T4[(kappa[i] >> 8) & 0xff] ^             in anubis_setkey()
    513      (T5[(K2 >> 24)       ] & 0xff000000U) ^   in anubis_setkey()
    514      (T5[(K2 >> 16) & 0xff] & 0x00ff0000U) ^   in anubis_setkey()
    515      (T5[(K2 >>  8) & 0xff] & 0x0000ff00U) ^   in anubis_setkey()
    516      (T5[(K2      ) & 0xff] & 0x000000ffU);    in anubis_setkey()
    526  ctx->E[r][2] = K2;                            in anubis_setkey()
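The anubis_setkey() lines above evolve K2 by substituting each of its bytes through a table while confining every result to its own byte lane (the 0xff000000U ... 0x000000ffU masks). A generic sketch of that byte-lane idiom, using a hypothetical 256-entry table tbl[] rather than Anubis's real T4/T5 tables:

  #include <stdint.h>

  /* Byte-lane substitution: each byte of w indexes a 256-entry table of
   * 32-bit words, and only the byte sitting in the matching lane of the
   * result is kept.  tbl[] is a hypothetical table, not Anubis's T4/T5. */
  static uint32_t substitute_byte_lanes(uint32_t w, const uint32_t tbl[256])
  {
          return (tbl[(w >> 24)       ] & 0xff000000U) ^
                 (tbl[(w >> 16) & 0xff] & 0x00ff0000U) ^
                 (tbl[(w >>  8) & 0xff] & 0x0000ff00U) ^
                 (tbl[(w      ) & 0xff] & 0x000000ffU);
  }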
|
D | rmd256.c |
     32  #define K2 RMD_K2                             macro
    106  ROUND(aa, bb, cc, dd, F2, K2, in[7], 7);      in rmd256_transform()
    107  ROUND(dd, aa, bb, cc, F2, K2, in[4], 6);      in rmd256_transform()
    108  ROUND(cc, dd, aa, bb, F2, K2, in[13], 8);     in rmd256_transform()
    109  ROUND(bb, cc, dd, aa, F2, K2, in[1], 13);     in rmd256_transform()
    110  ROUND(aa, bb, cc, dd, F2, K2, in[10], 11);    in rmd256_transform()
    111  ROUND(dd, aa, bb, cc, F2, K2, in[6], 9);      in rmd256_transform()
    112  ROUND(cc, dd, aa, bb, F2, K2, in[15], 7);     in rmd256_transform()
    113  ROUND(bb, cc, dd, aa, F2, K2, in[3], 15);     in rmd256_transform()
    114  ROUND(aa, bb, cc, dd, F2, K2, in[12], 7);     in rmd256_transform()
    [all …]
|
D | rmd128.c |
     32  #define K2 RMD_K2                             macro
     85  ROUND(aa, bb, cc, dd, F2, K2, in[7], 7);      in rmd128_transform()
     86  ROUND(dd, aa, bb, cc, F2, K2, in[4], 6);      in rmd128_transform()
     87  ROUND(cc, dd, aa, bb, F2, K2, in[13], 8);     in rmd128_transform()
     88  ROUND(bb, cc, dd, aa, F2, K2, in[1], 13);     in rmd128_transform()
     89  ROUND(aa, bb, cc, dd, F2, K2, in[10], 11);    in rmd128_transform()
     90  ROUND(dd, aa, bb, cc, F2, K2, in[6], 9);      in rmd128_transform()
     91  ROUND(cc, dd, aa, bb, F2, K2, in[15], 7);     in rmd128_transform()
     92  ROUND(bb, cc, dd, aa, F2, K2, in[3], 15);     in rmd128_transform()
     93  ROUND(aa, bb, cc, dd, F2, K2, in[12], 7);     in rmd128_transform()
    [all …]
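rmd256.c and rmd128.c keep a four-word working state, and each ROUND(aa, bb, cc, dd, F2, K2, in[n], s) line above is the standard RIPEMD step a = rol(a + f(b,c,d) + x + k, s). The sketch below spells that out for the round-2 selection function F2; the exact body of the kernel's ROUND macro is an assumption, not a copy:

  #include <stdint.h>

  static inline uint32_t rol32(uint32_t w, unsigned int s)
  {
          return (w << s) | (w >> (32 - s));
  }

  /* RIPEMD round-2 selection function */
  static inline uint32_t F2(uint32_t x, uint32_t y, uint32_t z)
  {
          return (x & y) | (~x & z);
  }

  /* Standard RIPEMD-128/256 step: a = rol(a + f(b,c,d) + x + k, s).
   * K2 expands to RMD_K2, one of the RIPEMD additive round constants. */
  #define ROUND(a, b, c, d, f, k, x, s) \
          ((a) = rol32((a) + f((b), (c), (d)) + (x) + (k), (s)))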
|
D | rmd160.c |
     32  #define K2 RMD_K2                                 macro
     91  ROUND(ee, aa, bb, cc, dd, F2, K2, in[7], 7);      in rmd160_transform()
     92  ROUND(dd, ee, aa, bb, cc, F2, K2, in[4], 6);      in rmd160_transform()
     93  ROUND(cc, dd, ee, aa, bb, F2, K2, in[13], 8);     in rmd160_transform()
     94  ROUND(bb, cc, dd, ee, aa, F2, K2, in[1], 13);     in rmd160_transform()
     95  ROUND(aa, bb, cc, dd, ee, F2, K2, in[10], 11);    in rmd160_transform()
     96  ROUND(ee, aa, bb, cc, dd, F2, K2, in[6], 9);      in rmd160_transform()
     97  ROUND(dd, ee, aa, bb, cc, F2, K2, in[15], 7);     in rmd160_transform()
     98  ROUND(cc, dd, ee, aa, bb, F2, K2, in[3], 15);     in rmd160_transform()
     99  ROUND(bb, cc, dd, ee, aa, F2, K2, in[12], 7);     in rmd160_transform()
    [all …]
|
D | rmd320.c |
     32  #define K2 RMD_K2                                 macro
    112  ROUND(ee, aa, bb, cc, dd, F2, K2, in[7], 7);      in rmd320_transform()
    113  ROUND(dd, ee, aa, bb, cc, F2, K2, in[4], 6);      in rmd320_transform()
    114  ROUND(cc, dd, ee, aa, bb, F2, K2, in[13], 8);     in rmd320_transform()
    115  ROUND(bb, cc, dd, ee, aa, F2, K2, in[1], 13);     in rmd320_transform()
    116  ROUND(aa, bb, cc, dd, ee, F2, K2, in[10], 11);    in rmd320_transform()
    117  ROUND(ee, aa, bb, cc, dd, F2, K2, in[6], 9);      in rmd320_transform()
    118  ROUND(dd, ee, aa, bb, cc, F2, K2, in[15], 7);     in rmd320_transform()
    119  ROUND(cc, dd, ee, aa, bb, F2, K2, in[3], 15);     in rmd320_transform()
    120  ROUND(bb, cc, dd, ee, aa, F2, K2, in[12], 7);     in rmd320_transform()
    [all …]
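rmd160.c and rmd320.c carry a five-word state, so the corresponding step (using the same F2 as in the previous sketch) also adds the fifth word and rotates the third word by 10 bits in place. Again a sketch of the conventional form; the kernel macro's exact body is an assumption:

  #include <stdint.h>

  static inline uint32_t rol32(uint32_t w, unsigned int s)
  {
          return (w << s) | (w >> (32 - s));
  }

  /* Conventional RIPEMD-160/320 step, mirroring the five-operand
   * ROUND(ee, aa, bb, cc, dd, F2, K2, in[7], 7) calls above:
   *   a = rol(a + f(b,c,d) + x + k, s) + e;  c = rol(c, 10); */
  #define ROUND(a, b, c, d, e, f, k, x, s) do {                          \
          (a) = rol32((a) + f((b), (c), (d)) + (x) + (k), (s)) + (e);    \
          (c) = rol32((c), 10);                                          \
  } while (0)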
|
D | khazad.c |
    763  u64 K2, K1;                                                    in khazad_setkey() local
    766  K2 = ((u64)be32_to_cpu(key[0]) << 32) | be32_to_cpu(key[1]);   in khazad_setkey()
    779  c[r] ^ K2;                                                     in khazad_setkey()
    780  K2 = K1;                                                       in khazad_setkey()
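Line 766 assembles a 64-bit Khazad key word from two big-endian 32-bit words. A userspace analogue of that load, with be32toh() from <endian.h> standing in for the kernel's be32_to_cpu():

  #include <stdint.h>
  #include <endian.h>     /* be32toh(); glibc and musl */

  /* Userspace analogue of line 766:
   *   K2 = ((u64)be32_to_cpu(key[0]) << 32) | be32_to_cpu(key[1]);
   * key[] holds two raw big-endian 32-bit words. */
  static uint64_t load_be64_from_be32_pair(const uint32_t key[2])
  {
          return ((uint64_t)be32toh(key[0]) << 32) | be32toh(key[1]);
  }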
|
/Linux-v4.19/arch/x86/crypto/ |
D | sha1_avx2_x86_64_asm.S |
    685  #define K2 0x6ed9eba1        macro
    693  .long K2, K2, K2, K2
    694  .long K2, K2, K2, K2
|
D | sha1_ssse3_asm.S |
    437  #define K2 0x6ed9eba1        macro
    446  .long K2, K2, K2, K2
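In both SHA-1 assembly files, and in the ARM NEON version listed further down, K2 is 0x6ed9eba1, the additive constant for SHA-1 rounds 20 through 39; the repeated .long K2 lines simply broadcast it across SIMD lanes (four values for an XMM register, eight for a YMM register). For orientation, a plain C sketch of the scalar round this constant belongs to:

  #include <stdint.h>

  #define K2 0x6ed9eba1U          /* SHA-1 additive constant, rounds 20..39 */

  static inline uint32_t rol32(uint32_t w, unsigned int s)
  {
          return (w << s) | (w >> (32 - s));
  }

  /* One scalar SHA-1 round in the 20..39 range: the parity function
   * b ^ c ^ d plus K2, the expanded message word and the usual rotates. */
  static void sha1_round_20_39(uint32_t s[5], uint32_t w)
  {
          uint32_t t = rol32(s[0], 5) + (s[1] ^ s[2] ^ s[3]) + s[4] + w + K2;

          s[4] = s[3];
          s[3] = s[2];
          s[2] = rol32(s[1], 30);
          s[1] = s[0];
          s[0] = t;
  }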
|
D | crc32c-pcl-intel-asm_64.S |
    223  pmovzxdq (bufp,%rax), %xmm0    # 2 consts: K1:K2
    228  PCLMULQDQ 0x00,%xmm0,%xmm1     # Multiply by K2
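Here pmovzxdq zero-extends two 32-bit constants into the two 64-bit halves of %xmm0, and PCLMULQDQ carry-less-multiplies a data qword by one of them to fold a chunk of the buffer forward. A hedged intrinsics sketch of that folding idiom; the constants are placeholders rather than values from the kernel's table, and the exact register/half assignment of the assembly above may differ:

  #include <stdint.h>
  #include <immintrin.h>          /* build with -mpclmul -msse4.1 */

  /* One 128-bit CRC "fold" step: the two 64-bit halves of a data vector
   * are carry-less multiplied by two constants and XORed into the next
   * chunk of the buffer.  k1/k2 are placeholders here, NOT the values
   * from the kernel's constant table. */
  static __m128i crc_fold_128(__m128i data, __m128i next_chunk,
                              uint64_t k1, uint64_t k2)
  {
          __m128i consts = _mm_set_epi64x((long long)k1, (long long)k2);
          __m128i lo = _mm_clmulepi64_si128(data, consts, 0x00); /* data.lo * k2 */
          __m128i hi = _mm_clmulepi64_si128(data, consts, 0x11); /* data.hi * k1 */

          return _mm_xor_si128(_mm_xor_si128(lo, hi), next_chunk);
  }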
|
D | serpent-sse2-x86_64-asm_64.S |
    392  #define K2(x0, x1, x2, x3, x4, i) \                 macro
    651  K2(RA, RB, RC, RD, RE, 0);
    683  S(S7, RD, RE, RB, RC, RA); K2(RA, RB, RC, RD, RE, 32);
    715  K2(RA, RB, RC, RD, RE, 32);
    747  S(SI0, RE, RB, RC, RA, RD); K2(RC, RD, RB, RE, RA, 0);
|
D | serpent-avx-x86_64-asm_64.S |
    389  #define K2(x0, x1, x2, x3, x4, i) \                 macro
    586  K2(RA, RB, RC, RD, RE, 0);
    618  S(S7, RD, RE, RB, RC, RA); K2(RA, RB, RC, RD, RE, 32);
    640  K2(RA, RB, RC, RD, RE, 32);
    672  S(SI0, RE, RB, RC, RA, RD); K2(RC, RD, RB, RE, RA, 0);
|
D | serpent-avx2-asm_64.S |
    385  #define K2(x0, x1, x2, x3, x4, i) \                 macro
    582  K2(RA, RB, RC, RD, RE, 0);
    614  S(S7, RD, RE, RB, RC, RA); K2(RA, RB, RC, RD, RE, 32);
    636  K2(RA, RB, RC, RD, RE, 32);
    668  S(SI0, RE, RB, RC, RA, RD); K2(RC, RD, RB, RE, RA, 0);
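In all three Serpent assembly files, K2 is the key-mixing macro: Serpent XORs a 128-bit round subkey into the block before each S-box layer and once more after the last one, which matches the call sites above (round index 0 at the start, 32 at the end). A scalar sketch of that step; the kernel macro does the equivalent for data held in SIMD registers rather than for a single block:

  #include <stdint.h>

  /* Scalar sketch of Serpent's key-mixing step: a 128-bit block is kept
   * as four 32-bit words, and round i XORs in the four matching words of
   * the expanded key.  The K2() macro above applies the same operation to
   * several blocks at once in vector registers. */
  static void serpent_add_round_key(uint32_t x[4],
                                    const uint32_t *expkey, unsigned int round)
  {
          x[0] ^= expkey[4 * round + 0];
          x[1] ^= expkey[4 * round + 1];
          x[2] ^= expkey[4 * round + 2];
          x[3] ^= expkey[4 * round + 3];
  }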
|
/Linux-v4.19/Documentation/arm/keystone/ |
D | Overview.txt | 42 All of the K2 SoCs/EVMs share a common defconfig, keystone_defconfig and same
|
/Linux-v4.19/arch/arm/crypto/ |
D | sha1-armv7-neon.S |
     32  #define K2 0x6ED9EBA1        macro
     38  .LK2: .long K2, K2, K2, K2
|
/Linux-v4.19/arch/arm/boot/dts/ |
D | s3c6410-mini6410.dts | 83 label = "K2";
|
/Linux-v4.19/drivers/gpu/drm/tegra/ |
D | dc.h | 751 #define K2(x) (((x) & 0xff) << 16) macro
|
D | hub.c | 446 value = K2(255) | K1(255) | WINDOW_LAYER_DEPTH(255 - zpos); in tegra_shared_plane_atomic_update()
|
D | dc.c | 297 value = K2(255) | K1(255) | WINDOW_LAYER_DEPTH(255 - window->zpos); in tegra_plane_setup_blending()
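dc.h defines K2(x) as (((x) & 0xff) << 16), so the K2(255) terms in hub.c and dc.c place an 8-bit blend weight of 0xff into bits 23:16 of the blending register value; K1() and WINDOW_LAYER_DEPTH() fill other fields, but their shifts are not shown here. A trivial check of what the macro contributes:

  #include <assert.h>
  #include <stdint.h>

  /* The macro as shown at dc.h line 751 */
  #define K2(x) (((x) & 0xff) << 16)

  int main(void)
  {
          /* K2(255) contributes 0x00ff0000 to the register value composed
           * in the hub.c / dc.c lines above. */
          uint32_t v = K2(255);

          assert(v == 0x00ff0000);
          return 0;
  }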
|
/Linux-v4.19/drivers/ata/ |
D | Kconfig |
    464  tristate "ServerWorks Frodo / Apple K2 SATA support"
    467  This option enables support for Broadcom/Serverworks/Apple K2
|
/Linux-v4.19/Documentation/s390/ |
D | vfio-ccw.txt | 250 K2. Translate the guest channel program to a host kernel space
|