/Linux-v6.1/arch/riscv/lib/

memmove.S
    54  add t4, a0, a2
    69  andi t6, t4, -SZREG
   163  sub a5, a4, t4 /* Find the difference between src and dest */
   198  addi t4, t4, (-2 * SZREG)
   202  REG_S t2, ( 1 * SZREG)(t4)
   204  beq t4, a2, 2f
   211  REG_S t2, ( 0 * SZREG)(t4)
   213  bne t4, t5, 1b
   215  mv t4, t5 /* Fix the dest pointer in case the loop was broken */
   217  add a4, t4, a5 /* Restore the src pointer */
   [all …]
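In the excerpt above, t4 appears to serve as a destination end pointer (a0 + a2, i.e. dest + count) for a backward copy, with the pointers repaired after the loop; copying from the end is the usual way to stay safe when the buffers overlap. A minimal C sketch of that direction choice, assuming a plain byte-wise helper rather than the kernel's optimized routine:

#include <stddef.h>

/* Sketch of an overlap-safe move: copy forward when dest is below src,
 * backward (from the end, much as the RISC-V code does with its dest-end
 * pointer) otherwise. Illustrative only, not the kernel implementation. */
static void *move_bytes(void *dest, const void *src, size_t n)
{
    unsigned char *d = dest;
    const unsigned char *s = src;

    if (d < s) {
        while (n--)
            *d++ = *s++;        /* forward copy */
    } else {
        d += n;                 /* one past the end, like dest + count */
        s += n;
        while (n--)
            *--d = *--s;        /* backward copy avoids clobbering src */
    }
    return dest;
}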
uaccess.S
    84  fixup REG_L t4, 7*SZREG(a1), 10f
    92  fixup REG_S t4, 7*SZREG(a0), 10f
   127  sub t4, a5, t3
   144  sll a2, a5, t4
/Linux-v6.1/arch/alpha/lib/

ev67-strrchr.S
    36  insbl a1, 1, t4     # U : 000000000000ch00
    41  or t2, t4, a1       # E : 000000000000chch
    48  sll a1, 48, t4      # U : chch000000000000
    50  or t4, a1, a1       # E : chch00000000chch
    56  mskqh t5, a0, t4    # E : Complete garbage mask
    58  cmpbge zero, t4, t4 # E : bits set iff byte is garbage
    61  andnot t1, t4, t1   # E : clear garbage from null test
    62  andnot t3, t4, t3   # E : clear garbage from char test
    84  negq t1, t4         # E : isolate first null byte match
    85  and t1, t4, t4      # E :
   [all …]
strrchr.S
    31  lda t4, -1          # .. e1 : build garbage mask
    34  mskqh t4, a0, t4    # e0 :
    37  cmpbge zero, t4, t4 # .. e1 : bits set iff byte is garbage
    39  andnot t1, t4, t1   # .. e1 : clear garbage from null test
    40  andnot t3, t4, t3   # e0 : clear garbage from char test
    56  negq t1, t4         # e0 : isolate first null byte match
    57  and t1, t4, t4      # e1 :
    58  subq t4, 1, t5      # e0 : build a mask of the bytes up to...
    59  or t4, t5, t4       # e1 : ... and including the null
    61  and t3, t4, t3      # e0 : mask out char matches after null
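The comments at lines 56-61 describe the usual word-at-a-time strrchr trick: isolate the first null-byte match, turn it into a mask covering every byte up to and including the terminator, and drop character matches that fall after it. A small C sketch of that bit manipulation, assuming the per-byte match bitmasks (as produced by Alpha's cmpbge) are already in hand; the names are illustrative, not the kernel's:

#include <stdint.h>

/* null_hits and char_hits are per-byte match bitmasks (bit i set means
 * byte i matched). Mirrors the negq/and/subq/or/and sequence above;
 * if there is no null in the word, the mask ends up all ones and every
 * char match is kept. Illustrative sketch only. */
static uint64_t mask_matches_after_null(uint64_t null_hits, uint64_t char_hits)
{
    uint64_t first_null = null_hits & -null_hits;   /* isolate first (lowest) null match */
    uint64_t keep = first_null | (first_null - 1);  /* bytes up to and including the null */

    return char_hits & keep;                        /* drop char matches after the null */
}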
strchr.S
    27  lda t4, -1          # .. e1 : build garbage mask
    30  mskqh t4, a0, t4    # e0 :
    33  cmpbge zero, t4, t4 # .. e1 : bits set iff byte is garbage
    38  andnot t0, t4, t0   # e0 : clear garbage bits
    58  and t0, 0xaa, t4    # e0 :
    61  cmovne t4, 1, t4    # .. e1 :
    63  addq v0, t4, v0     # .. e1 :
ev67-strchr.S
    40  lda t4, -1          # E : build garbage mask
    42  mskqh t4, a0, t4    # U : only want relevant part of first quad
    50  cmpbge zero, t4, t4 # E : bits set iff byte is garbage
    58  andnot t0, t4, t0   # E : clear garbage bits
/Linux-v6.1/arch/arm64/crypto/

crct10dif-ce-core.S
    84  t4 .req v18
   136  ext t4.8b, ad.8b, ad.8b, #1            // A1
   140  pmull t4.8h, t4.8b, fold_consts.8b     // F = A1*B
   150  tbl t4.16b, {ad.16b}, perm1.16b        // A1
   154  pmull2 t4.8h, t4.16b, fold_consts.16b  // F = A1*B
   162  0: eor t4.16b, t4.16b, t8.16b          // L = E + F
   166  uzp1 t8.2d, t4.2d, t5.2d
   167  uzp2 t4.2d, t4.2d, t5.2d
   173  eor t8.16b, t8.16b, t4.16b
   174  and t4.16b, t4.16b, k32_48.16b
   [all …]
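The pmull/pmull2 lines compute the partial products of the CRC folding step as carry-less (polynomial) multiplications, per the "F = A1*B" comments. As a reference for what each byte lane of pmull computes, a scalar C model of an 8x8-to-16-bit carry-less multiply (illustrative only, not the NEON code):

#include <stdint.h>

/* Carry-less (GF(2)[x]) multiply of two 8-bit polynomials: partial
 * products are XORed together instead of added, so no carries propagate. */
static uint16_t clmul8(uint8_t a, uint8_t b)
{
    uint16_t acc = 0;

    for (int i = 0; i < 8; i++)
        if (a & (1u << i))
            acc ^= (uint16_t)b << i;   /* shifted partial product */
    return acc;
}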
/Linux-v6.1/arch/x86/crypto/

camellia-aesni-avx2-asm_64.S
    62  #define roundsm32(x0, x1, x2, x3, x4, x5, x6, x7, t0, t1, t2, t3, t4, t5, t6, \    argument
    67  vbroadcasti128 .Linv_shift_row, t4; \
    75  vpshufb t4, x0, x0; \
    76  vpshufb t4, x7, x7; \
    77  vpshufb t4, x3, x3; \
    78  vpshufb t4, x6, x6; \
    79  vpshufb t4, x2, x2; \
    80  vpshufb t4, x5, x5; \
    81  vpshufb t4, x1, x1; \
    82  vpshufb t4, x4, x4; \
   [all …]
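In this round macro the .Linv_shift_row constant is broadcast into t4 and applied to every state register with vpshufb, so one byte shuffle performs the inverse ShiftRows permutation per 128-bit lane (the AVX variant below does the same with vmovdqa). A C model of the pshufb semantics on one 16-byte lane, as an illustrative sketch rather than the kernel macro:

#include <stdint.h>

/* Model of PSHUFB on a single 16-byte lane: each output byte is either
 * zero (control byte has its top bit set) or the source byte selected
 * by the low four bits of the control byte. Broadcasting one 16-byte
 * control vector applies the same permutation to every lane. */
static void pshufb16(uint8_t dst[16], const uint8_t src[16], const uint8_t ctl[16])
{
    for (int i = 0; i < 16; i++)
        dst[i] = (ctl[i] & 0x80) ? 0 : src[ctl[i] & 0x0f];
}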
camellia-aesni-avx-asm_64.S
    50  #define roundsm16(x0, x1, x2, x3, x4, x5, x6, x7, t0, t1, t2, t3, t4, t5, t6, \    argument
    55  vmovdqa .Linv_shift_row, t4; \
    61  vpshufb t4, x0, x0; \
    62  vpshufb t4, x7, x7; \
    63  vpshufb t4, x1, x1; \
    64  vpshufb t4, x4, x4; \
    65  vpshufb t4, x2, x2; \
    66  vpshufb t4, x5, x5; \
    67  vpshufb t4, x3, x3; \
    68  vpshufb t4, x6, x6; \
   [all …]
/Linux-v6.1/arch/ia64/lib/

memcpy.S
    36  # define t4 r22    macro
   190  sub t4=r0,dst                 // t4 = -dst
   194  shl t4=t4,3                   // t4 = 8*(dst & 7)
   202  mov pr=t4,0x38                // (p5,p4,p3)=(dst & 7)
   225  mov t4=ip
   228  adds t4=.memcpy_loops-1b,t4
   243  add t4=t0,t4
   255  mov b6=t4
copy_page_mck.S
    80  #define t4 r20    macro
    85  #define t10 t4    // alias!
   144  (p[D]) ld8 t4 = [src1], 3*8   // M1
   156  (p[D]) st8 [dst1] = t4, 3*8
copy_page.S
    44  .rotr t1[PIPE_DEPTH], t2[PIPE_DEPTH], t3[PIPE_DEPTH], t4[PIPE_DEPTH], \
    79  (p[0]) ld8 t4[0]=[src2],16
    80  (EPI) st8 [tgt2]=t4[PIPE_DEPTH-1],16
/Linux-v6.1/arch/arm/crypto/

sha256-armv4.pl
    51  $inp="r1"; $t4="r1";
    78  str $inp,[sp,#17*4]             @ make room for $t4
    95  str $inp,[sp,#17*4]             @ make room for $t4
   127  ldr $t4,[sp,#`($i+15)%16`*4]    @ from future BODY_16_xx
   144  @ ldr $t4,[sp,#`($i+14)%16`*4]
   147  mov $t2,$t4,ror#$sigma1[0]
   149  eor $t2,$t2,$t4,ror#$sigma1[1]
   152  eor $t2,$t2,$t4,lsr#$sigma1[2]  @ sigma1(X[i+14])
   153  ldr $t4,[sp,#`($i+9)%16`*4]
   159  add $t1,$t1,$t4                 @ X[i]
   [all …]
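Lines 147-152 implement SHA-256's small sigma1 on X[i+14] with two rotates and a logical shift, as the "sigma1(X[i+14])" comment notes. For reference, the same message-schedule function in C, using the standard FIPS 180-4 rotation amounts:

#include <stdint.h>

/* sigma1(x) = ROTR17(x) ^ ROTR19(x) ^ (x >> 10), the operation the
 * ror/ror/lsr sequence in the excerpt computes. */
static uint32_t ror32(uint32_t x, unsigned n)
{
    return (x >> n) | (x << (32 - n));
}

static uint32_t sigma1(uint32_t x)
{
    return ror32(x, 17) ^ ror32(x, 19) ^ (x >> 10);
}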
aes-cipher-core.S
    42  .macro __hround, out0, out1, in0, in1, in2, in3, t3, t4, enc, sz, op, oldcpsr
    65  __select \t4, \in0, 3
    68  __select \t4, \in2, 3
    72  __load \t4, \t4, 3, \sz, \op
    87  eor \out1, \out1, \t4, ror #8
/Linux-v6.1/lib/zlib_dfltcc/

dfltcc_util.h
    38  const Byte *t4 = op2 ? *op2 : NULL;            in dfltcc() local
    44  register const Byte *r4 __asm__("r4") = t4;    in dfltcc()
    60  t2 = r2; t3 = r3; t4 = r4; t5 = r5;            in dfltcc()
    67  *op2 = t4;                                     in dfltcc()
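Here dfltcc() copies t4 into a variable pinned to hardware register r4 before its inline asm statement and reads the register back afterwards. That is GCC's named-register variable idiom; a minimal stand-alone sketch of the pattern, where the register name assumes an s390 target (as in the excerpt) and the empty asm body stands in for the real instruction:

/* A value is forced into a specific hardware register before the asm
 * statement and read back from the pinned variable after it, the same
 * pattern dfltcc() uses for r2..r5. Illustrative only. */
static unsigned long pass_through_r4(unsigned long in)
{
    register unsigned long r4 __asm__("r4") = in;

    /* "+r" tells the compiler r4 is both consumed and produced by the
     * (placeholder) instruction, so the pinning is not optimized away. */
    __asm__ volatile("" : "+r"(r4));

    return r4;
}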
/Linux-v6.1/arch/s390/include/asm/

syscall_wrapper.h
    12  #define SYSCALL_PT_ARG6(regs, m, t1, t2, t3, t4, t5, t6)\    argument
    13  SYSCALL_PT_ARG5(regs, m, t1, t2, t3, t4, t5), \
    16  #define SYSCALL_PT_ARG5(regs, m, t1, t2, t3, t4, t5) \    argument
    17  SYSCALL_PT_ARG4(regs, m, t1, t2, t3, t4), \
    20  #define SYSCALL_PT_ARG4(regs, m, t1, t2, t3, t4) \    argument
    22  m(t4, (regs->gprs[5]))
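SYSCALL_PT_ARG6 peels off one argument type per step, so each tN is applied through the m() callback macro to its saved register slot, with t4 ending up as m(t4, (regs->gprs[5])). A reduced, self-contained C sketch of that recursive expansion pattern, using a hypothetical two-argument version with made-up names rather than the kernel header:

#include <stdio.h>

/* Reduced model of the SYSCALL_PT_ARG* recursion: ARG2 expands ARG1 and
 * then applies the mapping macro m to the next (type, register-slot) pair. */
struct fake_regs { unsigned long gprs[8]; };

#define PT_ARG1(regs, m, t1)      m(t1, (regs)->gprs[2])
#define PT_ARG2(regs, m, t1, t2)  PT_ARG1(regs, m, t1), m(t2, (regs)->gprs[3])

/* One possible mapper: cast the saved GPR to the declared argument type. */
#define ARG_CAST(type, value)     ((type)(value))

int main(void)
{
    struct fake_regs regs = { .gprs = { 0, 0, 42, 7 } };

    /* expands to: (long)(regs.gprs[2]), (unsigned int)(regs.gprs[3]) */
    printf("%ld %u\n", PT_ARG2(&regs, ARG_CAST, long, unsigned int));
    return 0;
}

The CALL_LARGS_* helpers in stacktrace.h below build their argument lists with the same peel-one-pair-per-macro approach.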
stacktrace.h
   104  #define CALL_LARGS_4(t1, a1, t2, a2, t3, a3, t4, a4) \    argument
   106  long arg4 = (long)(t4)(a4)
   107  #define CALL_LARGS_5(t1, a1, t2, a2, t3, a3, t4, a4, t5, a5) \    argument
   108  CALL_LARGS_4(t1, a1, t2, a2, t3, a3, t4, a4); \
/Linux-v6.1/arch/mips/lib/

csum_partial.S
    33  #define t4 $12    macro
   182  CSUM_BIGCHUNK1(src, 0x00, sum, t0, t1, t3, t4)
   193  CSUM_BIGCHUNK(src, 0x00, sum, t0, t1, t3, t4)
   194  CSUM_BIGCHUNK(src, 0x20, sum, t0, t1, t3, t4)
   195  CSUM_BIGCHUNK(src, 0x40, sum, t0, t1, t3, t4)
   196  CSUM_BIGCHUNK(src, 0x60, sum, t0, t1, t3, t4)
   208  CSUM_BIGCHUNK(src, 0x00, sum, t0, t1, t3, t4)
   209  CSUM_BIGCHUNK(src, 0x20, sum, t0, t1, t3, t4)
   217  CSUM_BIGCHUNK(src, 0x00, sum, t0, t1, t3, t4)
   476  LOAD(t4, UNIT(4)(src))
   [all …]
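CSUM_BIGCHUNK folds 32-byte blocks into a running one's-complement sum, using t0..t4 as temporaries so loads and adds can overlap. For reference, the scalar RFC 1071 form of that checksum in C; an illustrative sketch, not the kernel's csum_partial, which also handles odd lengths, misalignment and partial sums:

#include <stdint.h>
#include <stddef.h>

/* One's-complement accumulation over big-endian 16-bit words with the
 * carries folded back in at the end. Assumes an even length for brevity. */
static uint16_t csum16(const uint8_t *buf, size_t len)
{
    uint32_t sum = 0;

    while (len >= 2) {
        sum += (uint32_t)buf[0] << 8 | buf[1];   /* next 16-bit word */
        buf += 2;
        len -= 2;
    }
    while (sum >> 16)                            /* fold carries back in */
        sum = (sum & 0xffff) + (sum >> 16);
    return (uint16_t)~sum;                       /* final complemented checksum */
}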
/Linux-v6.1/arch/riscv/include/asm/

compat.h
    50  compat_ulong_t t4;    member
    87  cregs->t4 = (compat_ulong_t) regs->t4;    in regs_to_cregs()
   124  regs->t4 = (unsigned long) cregs->t4;     in cregs_to_regs()
/Linux-v6.1/arch/loongarch/mm/

page.S
    48  ld.d t4, a1, 32
    61  st.d t4, a0, 32
    63  ld.d t4, a1, 96
    76  st.d t4, a0, -32
/Linux-v6.1/arch/riscv/kernel/

crash_save_regs.S
    42  REG_S t4, PT_T4(a0) /* x29 */
    49  csrr t4, CSR_CAUSE
    54  REG_S t4, PT_CAUSE(a0)
mcount.S
    82  la t4, ftrace_stub
    86  bne t1, t4, do_ftrace_graph_caller
    95  bne t5, t4, do_trace
/Linux-v6.1/scripts/

makelst
    27  t4=`field 1 $t3`
    29  t6=`printf "%lu" $((0x$t4 - 0x$t5))`
/Linux-v6.1/arch/riscv/kvm/

vcpu_switch.S
    48  la t4, __kvm_switch_return
    61  csrrw t4, CSR_STVEC, t4
    74  REG_S t4, (KVM_ARCH_HOST_STVEC)(a0)
   104  REG_L t4, (KVM_ARCH_GUEST_T4)(a0)
   148  REG_S t4, (KVM_ARCH_GUEST_T4)(a0)
   156  REG_L t4, (KVM_ARCH_HOST_HSTATUS)(a0)
   172  csrrw t4, CSR_HSTATUS, t4
   181  REG_S t4, (KVM_ARCH_GUEST_HSTATUS)(a0)
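The csrrw lines swap a CSR with a general register in a single instruction, so the host's stvec or hstatus value is captured at the same moment the new value is installed. A minimal sketch of that swap idiom with RISC-V inline asm; the function and variable names are assumptions, not the kernel's switch code, and writing stvec needs supervisor privilege:

/* csrrw rd, csr, rs writes rs into the CSR and returns the previous CSR
 * value in rd: a one-instruction save-and-install, as used above. */
static inline unsigned long swap_stvec(unsigned long new_vec)
{
    unsigned long old_vec;

    __asm__ volatile("csrrw %0, stvec, %1"
                     : "=r"(old_vec)
                     : "r"(new_vec));
    return old_vec;
}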
/Linux-v6.1/arch/sparc/lib/

blockops.S
    28  #define MIRROR_BLOCK(dst, src, offset, t0, t1, t2, t3, t4, t5, t6, t7) \    argument
    31  ldd [src + offset + 0x08], t4; \
    35  std t4, [dst + offset + 0x08]; \