
Searched refs: XTMP2 (Results 1 – 4 of 4), sorted by relevance

/Linux-v6.1/arch/x86/crypto/
sha256-avx-asm.S
     87  XTMP2 = %xmm2                                          define
    182  vpsrld  $7, XTMP1, XTMP2
    189  vpor    XTMP2, XTMP3, XTMP3     # XTMP3 = W[-15] MY_ROR 7
    199  vpsrld  $18, XTMP1, XTMP2
    215  vpxor   XTMP2, XTMP3, XTMP3     # XTMP3 = W[-15] MY_ROR 7 ^ W[-15] MY_ROR 18
    224  vpshufd $0b11111010, X3, XTMP2  # XTMP2 = W[-2] {BBAA}
    239  vpsrld  $10, XTMP2, XTMP4       # XTMP4 = W[-2] >> 10 {BBAA}
    241  vpsrlq  $19, XTMP2, XTMP3       # XTMP3 = W[-2] MY_ROR 19 {xBxA}
    244  vpsrlq  $17, XTMP2, XTMP2       # XTMP2 = W[-2] MY_ROR 17 {xBxA}
    249  vpxor   XTMP3, XTMP2, XTMP2
[all …]
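
The matches above are pieces of the SHA-256 message-schedule step: the shift counts 7/18 and 17/19/10 are the constants of the small sigma functions from FIPS 180-4, applied to W[-15] and W[-2] four words at a time. As a scalar reference for what the vector code computes (helper names are mine, not the kernel's), the same math in C:

#include <stdint.h>

/* Rotate right: x86 SIMD has no packed 32-bit rotate, which is why
 * the listing builds it from shifts plus OR/XOR. */
static inline uint32_t ror32(uint32_t x, unsigned n)
{
	return (x >> n) | (x << (32 - n));
}

static inline uint32_t sigma0(uint32_t x)	/* applied to W[t-15] */
{
	return ror32(x, 7) ^ ror32(x, 18) ^ (x >> 3);
}

static inline uint32_t sigma1(uint32_t x)	/* applied to W[t-2] */
{
	return ror32(x, 17) ^ ror32(x, 19) ^ (x >> 10);
}

/* Schedule: W[t] = sigma1(W[t-2]) + W[t-7] + sigma0(W[t-15]) + W[t-16] */
static void sha256_expand(uint32_t W[64])
{
	for (int t = 16; t < 64; t++)
		W[t] = sigma1(W[t - 2]) + W[t - 7]
		     + sigma0(W[t - 15]) + W[t - 16];
}
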
sha256-ssse3-asm.S
     81  XTMP2 = %xmm2                                          define
    171  movdqa  XTMP1, XTMP2            # XTMP2 = W[-15]
    183  psrld   $7, XTMP2
    186  por     XTMP2, XTMP1            # XTMP1 = W[-15] ror 7
    191  movdqa  XTMP3, XTMP2            # XTMP2 = W[-15]
    203  psrld   $18, XTMP2
    215  pxor    XTMP2, XTMP1            # XTMP1 = W[-15] ror 7 ^ W[-15] ror 18
    224  pshufd  $0b11111010, X3, XTMP2  # XTMP2 = W[-2] {BBAA}
    232  movdqa  XTMP2, XTMP3            # XTMP3 = W[-2] {BBAA}
    236  movdqa  XTMP2, XTMP4            # XTMP4 = W[-2] {BBAA}
[all …]
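
Unlike the AVX version, the SSSE3 code must copy with movdqa before each shift, because pre-AVX SSE instructions are destructive two-operand forms (the VEX-encoded vpsrld/vpor above take a separate destination). The rotate itself is the usual shift/shift/OR idiom; a minimal SSE2 intrinsics sketch of the "ror 7" built on lines 183/186 (function name is mine):

#include <emmintrin.h>	/* SSE2 */

/* Rotate each 32-bit lane right by 7: two shifts plus an OR, since
 * SSE has no packed-rotate instruction. */
static __m128i ror32x4_by7(__m128i x)
{
	__m128i hi = _mm_srli_epi32(x, 7);	/* x >> 7        */
	__m128i lo = _mm_slli_epi32(x, 25);	/* x << (32 - 7) */
	return _mm_or_si128(hi, lo);
}
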
sha256-avx2-asm.S
     80  XTMP2 = %ymm2                                          define
    182  vpsrld  $7, XTMP1, XTMP2
    193  vpor    XTMP2, XTMP3, XTMP3     # XTMP3 = W[-15] ror 7
    195  vpsrld  $18, XTMP1, XTMP2
    233  vpxor   XTMP2, XTMP3, XTMP3     # XTMP3 = W[-15] ror 7 ^ W[-15] ror 18
    240  vpshufd $0b11111010, X3, XTMP2  # XTMP2 = W[-2] {BBAA}
    249  vpsrld  $10, XTMP2, XTMP4       # XTMP4 = W[-2] >> 10 {BBAA}
    261  vpsrlq  $19, XTMP2, XTMP3       # XTMP3 = W[-2] ror 19 {xBxA}
    269  vpsrlq  $17, XTMP2, XTMP2       # XTMP2 = W[-2] ror 17 {xBxA}
    273  vpxor   XTMP3, XTMP2, XTMP2
[all …]
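
The AVX2 version runs the same schedule on eight words per %ymm register. Lines 240-273 also show the trick behind the vpsrlq uses: pshufd $0b11111010 duplicates each W[-2] word into both halves of a 64-bit lane ({BBAA}), so a 64-bit logical right shift wraps a word's bits through its upper copy and leaves a 32-bit rotate in the low half ({xBxA}). A hedged AVX2 intrinsics sketch of that step, under my reading of the listing (function name is mine):

#include <immintrin.h>	/* AVX2 */

/* Input: each 64-bit lane holds a W[-2] word duplicated into both
 * halves (the {BBAA} layout). A 64-bit right shift by n then leaves
 * ror32(word, n) in the low 32 bits of the lane; only those low
 * halves of the result are meaningful. */
static __m256i sigma1_low_halves(__m256i w2_bbaa)
{
	__m256i r17 = _mm256_srli_epi64(w2_bbaa, 17);	/* W[-2] ror 17 {xBxA} */
	__m256i r19 = _mm256_srli_epi64(w2_bbaa, 19);	/* W[-2] ror 19 {xBxA} */
	__m256i s10 = _mm256_srli_epi32(w2_bbaa, 10);	/* W[-2] >> 10         */
	return _mm256_xor_si256(_mm256_xor_si256(r17, r19), s10);
}
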
sm3-avx-asm_64.S
    125  #define XTMP2 %xmm8                                    macro
242 vmovdqu 2*16(RDATA), XTMP2; /* XTMP2: w11, w10, w9, w8 */ \
246 vpshufb BSWAP_REG, XTMP2, XTMP2; \
249 vpxor XTMP1, XTMP2, XTMP5; \
250 vpxor XTMP2, XTMP3, XTMP6; \
258 vmovdqa XTMP2, IW_W1_ADDR(8, 0); \
265 vpalignr $12, XTMP1, XTMP2, W3; /* W3: xx, w9, w8, w7 */ \
266 vpalignr $8, XTMP2, XTMP3, W4; /* W4: xx, w12, w11, w10 */ \
284 vpslld $15, w5, XTMP2; \
286 vpxor XTMP2, XTMP3, XTMP3; \
[all …]
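
Here XTMP2 is scratch for the SM3 message expansion: the vmovdqu/vpshufb pair loads and byte-swaps message words (SM3 is defined big-endian), vpalignr assembles the sliding word windows (W3, W4), and the vpslld $15 / vpxor lines build a rotate-by-15 from shifts, as used inside the P1 permutation. A scalar C reference for what the vectorized loop computes, per the published SM3 definition (helper names are mine, not the kernel's):

#include <stdint.h>

static inline uint32_t rol32(uint32_t x, unsigned n)
{
	return (x << n) | (x >> (32 - n));
}

/* P1 permutation; its rotates are what the shift/XOR lines above
 * assemble, four words at a time. */
static inline uint32_t p1(uint32_t x)
{
	return x ^ rol32(x, 15) ^ rol32(x, 23);
}

/* SM3 message expansion for one 512-bit block. */
static void sm3_expand(uint32_t W[68])
{
	for (int j = 16; j < 68; j++)
		W[j] = p1(W[j - 16] ^ W[j - 9] ^ rol32(W[j - 3], 15))
		     ^ rol32(W[j - 13], 7) ^ W[j - 6];
}
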