Searched refs:XTMP0 (Results 1 – 4 of 4) sorted by relevance
/Linux-v6.1/arch/x86/crypto/
sm3-avx-asm_64.S

  123  #define XTMP0 %xmm6   [macro]
  240  vmovdqu 0*16(RDATA), XTMP0;        /* XTMP0: w3, w2, w1, w0 */ \
  244  vpshufb BSWAP_REG, XTMP0, XTMP0; \
  248  vpxor XTMP0, XTMP1, XTMP4; \
  252  vmovdqa XTMP0, IW_W1_ADDR(0, 0); \
  262  vpshufd $0b00000000, XTMP0, W0;    /* W0: xx, w0, xx, xx */ \
  263  vpshufd $0b11111001, XTMP0, W1;    /* W1: xx, w3, w2, w1 */ \
  272  vpshufd $0b10111111, w0, XTMP0; \
  273  vpalignr $12, XTMP0, w1, XTMP0;    /* XTMP0: xx, w2, w1, w0 */ \
  279  vpxor w3, XTMP0, XTMP0;
  [all …]
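These matches are the SM3 message expansion: the input words are byte-swapped into big-endian order (vpshufb BSWAP_REG), and the vpxor of XTMP0 with XTMP1 forms the W'[j] = W[j] ^ W[j+4] values stored interleaved at IW_W1_ADDR. For reference, a scalar C sketch of the GB/T 32905-2016 recurrence these vectorized lines compute four words at a time (sm3_expand and p1 are illustrative names, not symbols from the file):

#include <stdint.h>

#define ROL32(x, n) (((x) << (n)) | ((x) >> (32 - (n))))

/* P1 permutation used by the SM3 message expansion */
static uint32_t p1(uint32_t x)
{
	return x ^ ROL32(x, 15) ^ ROL32(x, 23);
}

/* Expand one 512-bit block into W[0..67] and W'[0..63]. */
static void sm3_expand(const uint32_t w_in[16], uint32_t w[68], uint32_t w1[64])
{
	int j;

	for (j = 0; j < 16; j++)
		w[j] = w_in[j];		/* big-endian words, cf. vpshufb BSWAP_REG */
	for (j = 16; j < 68; j++)
		w[j] = p1(w[j - 16] ^ w[j - 9] ^ ROL32(w[j - 3], 15)) ^
		       ROL32(w[j - 13], 7) ^ w[j - 6];
	for (j = 0; j < 64; j++)
		w1[j] = w[j] ^ w[j + 4];	/* cf. vpxor XTMP0, XTMP1, XTMP4 */
}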
sha256-avx-asm.S

   85  XTMP0 = %xmm0   [define]
  160  vpalignr $4, X2, X3, XTMP0          # XTMP0 = W[-7]
  167  vpaddd X0, XTMP0, XTMP0             # XTMP0 = W[-7] + W[-16]
  227  vpaddd XTMP1, XTMP0, XTMP0          # XTMP0 = W[-16] + W[-7] + s0
  261  vpaddd XTMP4, XTMP0, XTMP0          # XTMP0 = {..., ..., W[1], W[0]}
  265  vpshufd $0b01010000, XTMP0, XTMP2   # XTMP2 = W[-2] {DDCC}
  299  vpaddd XTMP0, XTMP5, X0             # X0 = {W[3], W[2], W[1], W[0]}
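Here XTMP0 accumulates the FIPS 180-4 message schedule four words per vector step: the comments trace W[-16] + W[-7] + s0, with the s1 term arriving via XTMP4/XTMP5. A scalar sketch of the same recurrence (sha256_schedule, s0, and s1 are illustrative names):

#include <stdint.h>

#define ROR32(x, n) (((x) >> (n)) | ((x) << (32 - (n))))

/* FIPS 180-4 small sigma functions */
static uint32_t s0(uint32_t x) { return ROR32(x, 7) ^ ROR32(x, 18) ^ (x >> 3); }
static uint32_t s1(uint32_t x) { return ROR32(x, 17) ^ ROR32(x, 19) ^ (x >> 10); }

/* W[0..15] come from the block; each later word mixes four earlier ones. */
static void sha256_schedule(uint32_t w[64])
{
	int t;

	for (t = 16; t < 64; t++)
		w[t] = s1(w[t - 2]) + w[t - 7] + s0(w[t - 15]) + w[t - 16];
}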
sha256-avx2-asm.S

   78  XTMP0 = %ymm0   [define]
  162  vpalignr $4, X2, X3, XTMP0          # XTMP0 = W[-7]
  168  vpaddd X0, XTMP0, XTMP0             # XTMP0 = W[-7] + W[-16]# y1 = (e >> 6)# S1
  244  vpaddd XTMP1, XTMP0, XTMP0          # XTMP0 = W[-16] + W[-7] + s0
  285  vpaddd XTMP4, XTMP0, XTMP0          # XTMP0 = {..., ..., W[1], W[0]}
  291  vpshufd $0b01010000, XTMP0, XTMP2   # XTMP2 = W[-2] {DDCC}
  341  vpaddd XTMP0, XTMP5, X0             # X0 = {W[3], W[2], W[1], W[0]}
  566  VMOVDQ 0*32(INP),XTMP0
  572  vpshufb BYTE_FLIP_MASK, XTMP0, XTMP0
  578  vperm2i128 $0x20, XTMP2, XTMP0, X0
  [all …]
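The AVX2 variant runs the same schedule, but XTMP0 is now a 256-bit ymm register, and the loads at lines 566–578 reflect this file's two-blocks-at-a-time layout: 32 bytes are loaded and byte-swapped, then vperm2i128 $0x20 packs the first four words of block 0 into the low lane of X0 and the first four words of block 1 into its high lane, so one vector step advances both blocks. A rough scalar picture of that layout (struct sched_reg and load_x0 are illustrative, assuming that arrangement):

#include <stdint.h>

/* Illustrative: one schedule register = four words from each of two blocks */
struct sched_reg {
	uint32_t lo[4];		/* block 0 words (low ymm lane)  */
	uint32_t hi[4];		/* block 1 words (high ymm lane) */
};

/* Scalar stand-in for VMOVDQ + vpshufb BYTE_FLIP_MASK + vperm2i128 */
static void load_x0(struct sched_reg *x0, const uint8_t *inp)
{
	int i;

	for (i = 0; i < 4; i++) {
		const uint8_t *a = inp + 4 * i;		/* block 0, words 0-3 */
		const uint8_t *b = inp + 64 + 4 * i;	/* block 1, words 0-3 */

		x0->lo[i] = ((uint32_t)a[0] << 24) | ((uint32_t)a[1] << 16) |
			    ((uint32_t)a[2] << 8) | a[3];
		x0->hi[i] = ((uint32_t)b[0] << 24) | ((uint32_t)b[1] << 16) |
			    ((uint32_t)b[2] << 8) | b[3];
	}
}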
sha256-ssse3-asm.S

   79  XTMP0 = %xmm0   [define]
  150  movdqa X3, XTMP0
  154  palignr $4, X2, XTMP0               # XTMP0 = W[-7]
  162  paddd X0, XTMP0                     # XTMP0 = W[-7] + W[-16]
  227  paddd XTMP1, XTMP0                  # XTMP0 = W[-16] + W[-7] + s0
  264  paddd XTMP4, XTMP0                  # XTMP0 = {..., ..., W[1], W[0]}
  268  pshufd $0b01010000, XTMP0, XTMP2    # XTMP2 = W[-2] {BBAA}
  305  paddd XTMP0, X0                     # X0 = {W[3], W[2], W[1], W[0]}
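Same schedule again, but SSSE3 has only destructive two-operand instructions, so line 150 copies X3 into XTMP0 before palignr overwrites it; the AVX files above fold that copy into the three-operand vpalignr. In intrinsics terms, the W[-7] extraction is a single align of the X3:X2 concatenation (w_minus_7 is an illustrative helper, not a symbol from the file):

#include <tmmintrin.h>	/* SSSE3: _mm_alignr_epi8 */

/*
 * With X0..X3 holding W[-16..-13], W[-12..-9], W[-8..-5], W[-4..-1],
 * shifting the concatenation X3:X2 right by 4 bytes yields
 * {W[-7], W[-6], W[-5], W[-4]}, as computed at lines 150/154 above.
 */
static inline __m128i w_minus_7(__m128i x2, __m128i x3)
{
	return _mm_alignr_epi8(x3, x2, 4);
}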