/Linux-v4.19/arch/x86/crypto/ |
D | morus640-sse2-asm.S |
      29:  #define T0 %xmm6    (macro)
      50:  movdqa \s1, T0
      51:  pand \s2, T0
      52:  pxor T0, \s0
      54:  movdqa \s0, T0
      55:  pslld $\b, T0
      57:  pxor T0, \s0
     162:  movq (%r8), T0
     163:  pxor T0, MSG
     185:  movq T0, %r10
     [all …]
|
D | morus1280-avx2-asm.S |
      31:  #define T0 %ymm6    (macro)
      54:  vpand \s1, \s2, T0
      55:  vpxor T0, \s0, \s0
      57:  vpsllq $\b, \s0, T0
      59:  vpxor T0, \s0, \s0
     196:  vpermq $MASK2, T0, T0
     373:  vmovdqa MSG, T0
     374:  vpxor STATE0, T0, T0
     376:  vpxor T1, T0, T0
     378:  vpxor T1, T0, T0
     [all …]
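In both MORUS files T0 is the scratch register of the round macro: the first three visible lines fold (s1 AND s2) into s0, and the shift/XOR pair afterwards is half of a lane rotate (32-bit lanes via pslld in the SSE2 version, 64-bit lanes via vpsllq in the AVX2 one; the matching right shift sits in the elided lines). A minimal scalar sketch of the visible steps, assuming that elided right shift; rotl32 and morus_like_step are illustrative names, not the kernel's helpers:

    #include <stdint.h>

    /* Rotate-left of one 32-bit lane (0 < b < 32).  The pslld/pxor pair does
     * this for every lane of an XMM register at once, with the matching
     * right shift in one of the elided lines. */
    static inline uint32_t rotl32(uint32_t x, unsigned b)
    {
        return (x << b) | (x >> (32 - b));
    }

    /* The visible part of the round macro: fold s1 AND s2 into s0, then
     * rotate s0 by the per-round constant b.  Other elided lines fold in
     * further state words. */
    static inline uint32_t morus_like_step(uint32_t s0, uint32_t s1,
                                           uint32_t s2, unsigned b)
    {
        s0 ^= s1 & s2;          /* movdqa \s1,T0 / pand \s2,T0 / pxor T0,\s0 */
        return rotl32(s0, b);   /* movdqa \s0,T0 / pslld $\b,T0 / pxor T0,\s0 */
    }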
|
D | aegis128l-aesni-asm.S |
      25:  #define T0 %xmm10    (macro)
     116:  movq (%r8), T0
     117:  pxor T0, MSG0
     151:  movdqu T0, (%r9)
     152:  movdqa T1, T0
     158:  movq T0, %r10
     164:  psrldq $8, T0
     165:  movq T0, %r10
     201:  movdqa STATE7, T0
     209:  aesenc T0, STATE6
     [all …]
|
D | aegis256-aesni-asm.S |
      22:  #define T0 %xmm7    (macro)
     108:  movq (%r8), T0
     109:  pxor T0, MSG
     131:  movq T0, %r10
     137:  psrldq $8, T0
     138:  movq T0, %r10
     174:  movdqa STATE5, T0
     180:  aesenc T0, STATE4
     423:  movdqa MSG, T0
     424:  crypt\i T0
     [all …]
|
D | aegis128-aesni-asm.S |
      22:  #define T0 %xmm6    (macro)
      57:  movdqa STATE4, T0
      62:  aesenc T0, STATE3
     124:  movq (%r8), T0
     125:  pxor T0, MSG
     147:  movq T0, %r10
     153:  psrldq $8, T0
     154:  movq T0, %r10
     388:  movdqa MSG, T0
     389:  pxor \s1, T0
     [all …]
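Across the three AEGIS files T0 does two jobs: staging partial blocks (the movq/psrldq sequences around 8-byte heads and tails) and saving a state word before each word is refreshed with a single aesenc keyed by its neighbour (e.g. aegis128-aesni-asm.S:57-62). A hedged sketch of that neighbour-keyed update; aes_enc_round() is a placeholder for what one AESENC instruction computes, and the indexing follows the published AEGIS-128 description rather than the kernel's register naming:

    #include <stdint.h>

    typedef struct { uint8_t b[16]; } block128;

    /* Placeholder only: a real implementation performs one AES round
     * (SubBytes/ShiftRows/MixColumns) of x under round key k, i.e. AESENC.
     * The XOR stub merely keeps the sketch compilable. */
    static block128 aes_enc_round(block128 x, block128 k)
    {
        for (int i = 0; i < 16; i++)
            x.b[i] ^= k.b[i];
        return x;
    }

    /* AEGIS-style state update over five 128-bit words: each word is put
     * through one AES round keyed by the word before it, with the message
     * block folded into the first word's key.  Treat as a sketch. */
    static void aegis_like_update(block128 s[5], block128 msg)
    {
        block128 s4 = s[4];                       /* saved, like movdqa STATEn, T0 */

        for (int i = 4; i > 0; i--)
            s[i] = aes_enc_round(s[i - 1], s[i]);
        for (int j = 0; j < 16; j++)
            s[0].b[j] ^= msg.b[j];                /* message injection */
        s[0] = aes_enc_round(s4, s[0]);
    }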
|
D | twofish-x86_64-asm_64-3way.S |
      92:  #define do16bit_ror(rot, op1, op2, T0, T1, tmp1, tmp2, ab, dst) \    (argument)
      96:  op1##l T0(CTX, tmp2, 4), dst ## d; \
|
D | camellia-x86_64-asm_64.S |
      94:  #define xor2ror16(T0, T1, tmp1, tmp2, ab, dst) \    (argument)
      98:  xorq T0(, tmp2, 8), dst; \
|
/Linux-v4.19/arch/mips/kvm/ |
D | entry.c |
      31:  #define T0 8    (macro)
      38:  #define T0 12    (macro)
     305:  UASM_i_LW(&p, T0, offsetof(struct kvm_vcpu_arch, pc), K1);   in kvm_mips_build_enter_guest()
     306:  UASM_i_MTC0(&p, T0, C0_EPC);   in kvm_mips_build_enter_guest()
     345:  uasm_i_mfc0(&p, T0, C0_GUESTCTL1);   in kvm_mips_build_enter_guest()
     347:  uasm_i_ext(&p, T1, T0, MIPS_GCTL1_ID_SHIFT,   in kvm_mips_build_enter_guest()
     349:  uasm_i_ins(&p, T0, T1, MIPS_GCTL1_RID_SHIFT,   in kvm_mips_build_enter_guest()
     351:  uasm_i_mtc0(&p, T0, C0_GUESTCTL1);   in kvm_mips_build_enter_guest()
     369:  UASM_i_LW(&p, T0, offsetof(struct kvm_vcpu_arch, cop0), K1);   in kvm_mips_build_enter_guest()
     370:  UASM_i_LW(&p, T0, offsetof(struct mips_coproc, reg[MIPS_CP0_STATUS][0]),   in kvm_mips_build_enter_guest()
     [all …]
|
/Linux-v4.19/arch/sparc/crypto/ |
D | aes_asm.S |
       7:  #define ENCRYPT_TWO_ROUNDS(KEY_BASE, I0, I1, T0, T1) \    (argument)
       8:  AES_EROUND01(KEY_BASE + 0, I0, I1, T0) \
      10:  AES_EROUND01(KEY_BASE + 4, T0, T1, I0) \
      11:  AES_EROUND23(KEY_BASE + 6, T0, T1, I1)
      13:  #define ENCRYPT_TWO_ROUNDS_2(KEY_BASE, I0, I1, I2, I3, T0, T1, T2, T3) \    (argument)
      14:  AES_EROUND01(KEY_BASE + 0, I0, I1, T0) \
      18:  AES_EROUND01(KEY_BASE + 4, T0, T1, I0) \
      19:  AES_EROUND23(KEY_BASE + 6, T0, T1, I1) \
      23:  #define ENCRYPT_TWO_ROUNDS_LAST(KEY_BASE, I0, I1, T0, T1) \    (argument)
      24:  AES_EROUND01(KEY_BASE + 0, I0, I1, T0) \
     [all …]
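The SPARC macros interleave two rounds by ping-ponging between the I and T register pairs: the first round reads I0/I1 and writes T0/T1, the second reads T0/T1 and writes back into I0/I1, so no moves are needed between rounds. A structural sketch of that double buffering; aes_eround() is a hypothetical stand-in for the AES_EROUND01/23 hardware instructions, not the kernel's API:

    #include <stdint.h>

    typedef struct { uint64_t w[2]; } block128;

    /* Stub for one hardware-assisted AES round step; the XOR is only
     * here to keep the sketch compilable. */
    static block128 aes_eround(const uint64_t *round_key, block128 in)
    {
        in.w[0] ^= round_key[0];
        in.w[1] ^= round_key[1];
        return in;
    }

    /* The ENCRYPT_TWO_ROUNDS pattern: round N writes the T block,
     * round N+1 consumes it and writes the I block back in place. */
    static void encrypt_two_rounds(const uint64_t *key_base, block128 *io)
    {
        block128 t = aes_eround(key_base + 0, *io);   /* I -> T */
        *io = aes_eround(key_base + 4, t);            /* T -> I */
    }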
|
/Linux-v4.19/arch/arm64/crypto/ |
D | sha512-armv8.pl |
     109:  my ($T0,$T1,$T2)=(@X[($i-8)&15],@X[($i-9)&15],@X[($i-10)&15]);
     110:  $T0=@X[$i+3] if ($i<11);
     142:  eor $T0,$e,$e,ror#`$Sigma1[2]-$Sigma1[1]`
     148:  eor $t0,$t0,$T0,ror#$Sigma1[1]   // Sigma1(e)
     149:  ror $T0,$a,#$Sigma0[0]
     156:  eor $t1,$T0,$t1,ror#$Sigma0[1]   // Sigma0(a)
     168:  ror $T0,$a,#$Sigma0[0]
     175:  eor $T0,$T0,$a,ror#$Sigma0[1]
     182:  eor $t1,$T0,$a,ror#$Sigma0[2]   // Sigma0(a)
     463:  my ($T0,$T1,$T2,$T3,$T4,$T5,$T6,$T7) = map("q$_",(4..7,16..19));
     [all …]
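In this SHA-512 template T0 carries intermediate values while Sigma1(e) and Sigma0(a) are assembled from rotate-and-XOR steps (the "// Sigma1(e)" and "// Sigma0(a)" comments mark where they complete). For reference, the two functions in plain C with the standard SHA-512 rotation amounts; this mirrors the FIPS 180-4 definition, not the generated assembly:

    #include <stdint.h>

    static inline uint64_t ror64(uint64_t x, unsigned n)   /* 0 < n < 64 */
    {
        return (x >> n) | (x << (64 - n));
    }

    /* FIPS 180-4 big-Sigma functions used in the SHA-512 compression loop. */
    static inline uint64_t Sigma0(uint64_t a)   /* acts on working variable a */
    {
        return ror64(a, 28) ^ ror64(a, 34) ^ ror64(a, 39);
    }

    static inline uint64_t Sigma1(uint64_t e)   /* acts on working variable e */
    {
        return ror64(e, 14) ^ ror64(e, 18) ^ ror64(e, 41);
    }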
|
/Linux-v4.19/arch/arm/crypto/ |
D | sha256-armv4.pl |
     292:  my ($T0,$T1,$T2,$T3,$T4,$T5)=("q8","q9","q10","q11","d24","d25");
     312:  &vext_8 ($T0,@X[0],@X[1],4);   # X[1..4]
     320:  &vshr_u32 ($T2,$T0,$sigma0[0]);
     326:  &vshr_u32 ($T1,$T0,$sigma0[2]);
     329:  &vsli_32 ($T2,$T0,32-$sigma0[0]);
     332:  &vshr_u32 ($T3,$T0,$sigma0[1]);
     338:  &vsli_32 ($T3,$T0,32-$sigma0[1]);
     386:  &vld1_32 ("{$T0}","[$Ktbl,:128]!");
     398:  &vadd_i32 ($T0,$T0,@X[0]);
     400:  &vst1_32 ("{$T0}","[$Xfer,:128]!");
     [all …]
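In the NEON message schedule T0 holds the X[1..4] window extracted by vext_8, from which sigma0 is built: each vshr_u32/vsli_32 pair forms one rotate, and the $sigma0[2] shift is the non-rotating term. The equivalent scalar function, per the FIPS 180-4 definition of SHA-256's small sigma0 (shown for reference, not taken from the generated code):

    #include <stdint.h>

    static inline uint32_t ror32(uint32_t x, unsigned n)   /* 0 < n < 32 */
    {
        return (x >> n) | (x << (32 - n));
    }

    /* SHA-256 message-schedule sigma0: two rotates plus one plain right
     * shift, matching the vshr/vsli pairs built around T0. */
    static inline uint32_t sigma0(uint32_t x)
    {
        return ror32(x, 7) ^ ror32(x, 18) ^ (x >> 3);
    }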
|
/Linux-v4.19/crypto/ |
D | anubis.c |
      52:  static const u32 T0[256] = {    (variable)
     536:  inter[i] = T0[(kappa[j--] >> 24) ];   in anubis_setkey()
     564:  T0[T4[(v >> 24) ] & 0xff] ^   in anubis_setkey()
     596:  T0[(state[0] >> 24) ] ^   in anubis_crypt()
     602:  T0[(state[0] >> 16) & 0xff] ^   in anubis_crypt()
     608:  T0[(state[0] >> 8) & 0xff] ^   in anubis_crypt()
     614:  T0[(state[0] ) & 0xff] ^   in anubis_crypt()
     630:  (T0[(state[0] >> 24) ] & 0xff000000U) ^   in anubis_crypt()
     636:  (T0[(state[0] >> 16) & 0xff] & 0xff000000U) ^   in anubis_crypt()
     642:  (T0[(state[0] >> 8) & 0xff] & 0xff000000U) ^   in anubis_crypt()
     [all …]
|
D | khazad.c |
      38:  static const u64 T0[256] = {    (variable)
     771:  ctx->E[r] = T0[(int)(K1 >> 56) ] ^   in khazad_setkey()
     787:  ctx->D[r] = T0[(int)S[(int)(K1 >> 56) ] & 0xff] ^   in khazad_setkey()
     813:  state = T0[(int)(state >> 56) ] ^   in khazad_crypt()
     824:  state = (T0[(int)(state >> 56) ] & 0xff00000000000000ULL) ^   in khazad_crypt()
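In both ciphers T0 is the first of a set of 256-entry lookup tables that fold the S-box and the linear diffusion layer into a single table read per input byte: Anubis uses u32 tables over a four-word state, KHAZAD u64 tables over a single 64-bit state word, but in each case a round reduces to byte-indexed lookups XORed together with the round key. A generic sketch of the technique; the table contents and the exact byte-to-word wiring are cipher-specific, so this is illustrative rather than Anubis's or KHAZAD's actual round:

    #include <stdint.h>

    /* Illustrative tables: T[k][b] would hold diffusion(sbox(b)) placed in
     * byte position k.  Left zeroed here; a real cipher generates them
     * from its S-box and MDS matrix. */
    static const uint32_t T[4][256];

    /* One table-driven round over a 32-bit word of state. */
    static uint32_t ttable_round(uint32_t w, uint32_t round_key)
    {
        return T[0][(w >> 24) & 0xff] ^
               T[1][(w >> 16) & 0xff] ^
               T[2][(w >>  8) & 0xff] ^
               T[3][ w        & 0xff] ^
               round_key;
    }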
|
/Linux-v4.19/arch/mips/mm/ |
D | page.c |
      45:  #define T0 8    (macro)
     475:  build_copy_load(&buf, T0, off);   in build_copy_page()
     483:  build_copy_store(&buf, T0, off);   in build_copy_page()
     497:  build_copy_load(&buf, T0, off);   in build_copy_page()
     505:  build_copy_store(&buf, T0, off);   in build_copy_page()
     523:  build_copy_load(&buf, T0, off);   in build_copy_page()
     528:  build_copy_store(&buf, T0, off);   in build_copy_page()
     541:  build_copy_load(&buf, T0, off);   in build_copy_page()
     546:  build_copy_store(&buf, T0, off);   in build_copy_page()
     565:  build_copy_load(&buf, T0, off);   in build_copy_page()
     [all …]
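Here T0 names one of the scratch registers used by the run-time assembled copy_page() routine: build_copy_load()/build_copy_store() emit an unrolled sequence of load/store pairs at increasing offsets. A plain C picture of that unrolling; the word size, unroll factor and register count below are illustrative, not the driver's actual parameters:

    #include <stdint.h>
    #include <stddef.h>

    /* Unrolled word copy: each iteration moves four words through separate
     * temporaries, mirroring the emitted load/store pairs. */
    static void copy_page_unrolled(uint64_t *dst, const uint64_t *src,
                                   size_t words)
    {
        for (size_t off = 0; off + 4 <= words; off += 4) {
            uint64_t t0 = src[off + 0];
            uint64_t t1 = src[off + 1];
            uint64_t t2 = src[off + 2];
            uint64_t t3 = src[off + 3];
            dst[off + 0] = t0;
            dst[off + 1] = t1;
            dst[off + 2] = t2;
            dst[off + 3] = t3;
        }
    }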
|
/Linux-v4.19/arch/x86/crypto/sha1-mb/ |
D | sha1_x8_avx2.S |
     231:  # ymm6 T0 BB
     249:  T0 = %ymm6    (define)
     334:  VMOVPS (inp0, IDX), T0
     343:  TRANSPOSE8 T0, T1, T2, T3, T4, T5, T6, T7, T8, T9
     344:  vpshufb F, T0, T0
     345:  vmovdqu T0, (I*8)*32(%rsp)
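This multi-buffer routine hashes eight independent buffers at once: T0 first receives 32 bytes from input 0, TRANSPOSE8 then regroups the eight loads so that each YMM register carries the same word position from all eight buffers (one lane per buffer), and vpshufb with the F mask presumably applies the byte-order shuffle SHA-1 needs for its big-endian words. The layout change alone, in scalar form (names are illustrative):

    #include <stdint.h>

    /* Gather word w of each of the eight message blocks into one row, so a
     * vector register can step all eight hashes in lock-step. */
    static void transpose8(uint32_t out[8][8], const uint32_t in[8][8])
    {
        for (int w = 0; w < 8; w++)          /* word index within a block */
            for (int b = 0; b < 8; b++)      /* buffer (lane) index       */
                out[w][b] = in[b][w];
    }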
|
/Linux-v4.19/kernel/trace/ |
D | trace_irqsoff.c |
     323:  u64 T0, T1, delta;   in check_critical_timing()  (local)
     327:  T0 = data->preempt_timestamp;   in check_critical_timing()
     329:  delta = T1-T0;   in check_critical_timing()
|
D | trace_sched_wakeup.c |
     444:  u64 T0, T1, delta;   in probe_wakeup_sched_switch()  (local)
     488:  T0 = data->preempt_timestamp;   in probe_wakeup_sched_switch()
     490:  delta = T1-T0;   in probe_wakeup_sched_switch()
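Both latency tracers (trace_irqsoff.c and trace_sched_wakeup.c) use the same bookkeeping: T0 is the timestamp saved when the section of interest began, T1 the current timestamp, and delta = T1 - T0 is what gets compared against the tracer's running maximum. The pattern in isolation, with clock_gettime() standing in for the tracer's internal clock source:

    #include <stdint.h>
    #include <time.h>

    static uint64_t now_ns(void)   /* stand-in for the tracer's clock */
    {
        struct timespec ts;
        clock_gettime(CLOCK_MONOTONIC, &ts);
        return (uint64_t)ts.tv_sec * 1000000000ull + ts.tv_nsec;
    }

    static uint64_t max_latency;

    /* T0 is captured at section entry, T1 sampled at exit; record a new
     * maximum when the elapsed time exceeds the current record. */
    static void check_latency(uint64_t T0)
    {
        uint64_t T1 = now_ns();
        uint64_t delta = T1 - T0;

        if (delta > max_latency)
            max_latency = delta;
    }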
|
/Linux-v4.19/tools/perf/ |
D | builtin-sched.c |
     295:  u64 T0 = get_nsecs(), T1;   in burn_nsecs()  (local)
     299:  } while (T1 + sched->run_measurement_overhead < T0 + nsecs);   in burn_nsecs()
     314:  u64 T0, T1, delta, min_delta = NSEC_PER_SEC;   in calibrate_run_measurement_overhead()  (local)
     318:  T0 = get_nsecs();   in calibrate_run_measurement_overhead()
     321:  delta = T1-T0;   in calibrate_run_measurement_overhead()
     331:  u64 T0, T1, delta, min_delta = NSEC_PER_SEC;   in calibrate_sleep_measurement_overhead()  (local)
     335:  T0 = get_nsecs();   in calibrate_sleep_measurement_overhead()
     338:  delta = T1-T0;   in calibrate_sleep_measurement_overhead()
     738:  u64 T0, T1, delta, avg_delta, fluct;   in run_one_test()  (local)
     740:  T0 = get_nsecs();   in run_one_test()
     [all …]
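perf sched calibrates its own measurement cost: the calibrate_*_measurement_overhead() helpers repeatedly take back-to-back timestamps (T0, T1) and keep the smallest delta, and burn_nsecs() then accounts for that overhead in its loop condition so busy-waits run for the intended wall time. A compact sketch of the minimum-of-N calibration, with get_nsecs() approximated by clock_gettime() and the iteration count chosen arbitrarily:

    #include <stdint.h>
    #include <time.h>

    #define NSEC_PER_SEC 1000000000ull

    static uint64_t get_nsecs(void)
    {
        struct timespec ts;
        clock_gettime(CLOCK_MONOTONIC, &ts);
        return (uint64_t)ts.tv_sec * NSEC_PER_SEC + ts.tv_nsec;
    }

    /* Take the cheapest of N back-to-back measurements as the fixed cost
     * of the measurement itself. */
    static uint64_t calibrate_overhead(void)
    {
        uint64_t min_delta = NSEC_PER_SEC;

        for (int i = 0; i < 10; i++) {
            uint64_t T0 = get_nsecs();
            uint64_t T1 = get_nsecs();
            uint64_t delta = T1 - T0;

            if (delta < min_delta)
                min_delta = delta;
        }
        return min_delta;
    }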
|
/Linux-v4.19/drivers/ata/ |
D | pata_octeon_cf.c |
     224:  unsigned int T0, Tkr, Td;   in octeon_cf_set_dmamode()  (local)
     231:  T0 = timing->cycle;   in octeon_cf_set_dmamode()
     247:  oe_n = max(T0 - oe_a, Tkr);   in octeon_cf_set_dmamode()
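Here T0 is the total DMA cycle time taken from the ATA timing table; the visible line derives the "negate" portion of the cycle as whatever remains after the assert portion, floored at Tkr. The arithmetic by itself, as a hedged restatement of that one line (parameter names are mine, and the interpretation of oe_a/oe_n as assert/negate widths is inferred from the code, not stated in it):

    /* Remainder of the cycle after the assert width, never less than Tkr. */
    static unsigned int negate_width(unsigned int T0, unsigned int assert_width,
                                     unsigned int Tkr)
    {
        unsigned int n = T0 - assert_width;

        return n > Tkr ? n : Tkr;   /* max(T0 - oe_a, Tkr) */
    }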
|
/Linux-v4.19/drivers/gpu/drm/amd/amdgpu/ |
D | sislands_smc.h |
     374:  uint32_t T0;    (member)
|
/Linux-v4.19/drivers/gpu/drm/radeon/ |
D | sislands_smc.h |
     374:  uint32_t T0;    (member)
|
D | si_dpm.c |
    2501:  dte_tables->T0 = cpu_to_be32(dte_data->t0);   in si_initialize_smc_dte_tables()
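The cpu_to_be32() call implies the SMC-side DTE table is laid out big-endian, so every field, including T0, is byte-swapped on little-endian hosts before the table is handed to the controller. A userspace-style illustration of the same conversion using the compiler byte-swap builtin (the kernel uses its own cpu_to_be32 helper instead):

    #include <stdint.h>

    /* Convert a host-endian 32-bit value to big-endian before placing it
     * in a firmware-visible table. */
    static uint32_t to_be32(uint32_t v)
    {
    #if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
        return __builtin_bswap32(v);   /* swap on little-endian hosts */
    #else
        return v;                      /* already big-endian */
    #endif
    }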
|
/Linux-v4.19/tools/objtool/arch/x86/lib/ |
D | x86-opcode-map.txt |
    1029:  1: prefetch T0
|
/Linux-v4.19/tools/perf/util/intel-pt-decoder/ |
D | x86-opcode-map.txt |
    1029:  1: prefetch T0
|
/Linux-v4.19/arch/x86/lib/ |
D | x86-opcode-map.txt |
    1029:  1: prefetch T0
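These three files are copies of the same opcode table; "1: prefetch T0" is the /1 form of the prefetch group, i.e. PREFETCHT0, a hint to pull the addressed cache line into all levels of the cache hierarchy. From C, the closest portable spelling is the GCC/Clang builtin with maximum temporal locality (the mapping to prefetcht0 is typical for x86 but ultimately up to the compiler):

    /* Hint the CPU to fetch *p into all cache levels (T0 locality). */
    static inline void prefetch_t0(const void *p)
    {
        __builtin_prefetch(p, 0 /* read */, 3 /* highest temporal locality */);
    }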
|