
Searched refs:xmm (Results 1 – 12 of 12) sorted by relevance

/Linux-v4.19/arch/x86/include/asm/
inst.h
    126  .macro XMM_NUM opd xmm
    128  .ifc \xmm,%xmm0
    131  .ifc \xmm,%xmm1
    134  .ifc \xmm,%xmm2
    137  .ifc \xmm,%xmm3
    140  .ifc \xmm,%xmm4
    143  .ifc \xmm,%xmm5
    146  .ifc \xmm,%xmm6
    149  .ifc \xmm,%xmm7
    152  .ifc \xmm,%xmm8
         [all …]

kvm_emulate.h
    250  unsigned xmm;  (member)
/Linux-v4.19/arch/x86/crypto/
aesni-intel_asm.S
    187  # states of %xmm registers %xmm6:%xmm15 not saved
    188  # all %xmm registers are clobbered
    203  # clobbers r12, and tmp xmm registers.
    653  movups (\PLAIN_CYPH_IN), %xmm1   # If more than 16 bytes, just fill xmm
    800  movdqu AadHash(%arg2), %xmm\i    # XMM0 = Y0
    813  movdqa \XMM0, %xmm\index
    815  MOVADQ \XMM0, %xmm\index
    817  PSHUFB_XMM %xmm14, %xmm\index    # perform a 16 byte swap
    818  pxor \TMP2, %xmm\index
    828  AESENC \TMP1, %xmm\index
         [all …]
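
The hits at lines 817 and 828 pair a PSHUFB byte swap with an AESENC round on the same %xmm register. A minimal sketch of those two operations as C intrinsics, assuming an SSSE3/AES-NI capable CPU; the function name and mask values are illustrative, not taken from the kernel source:

    #include <immintrin.h>

    /* Illustrative only: a full 16-byte lane reversal (what PSHUFB_XMM does with
     * a byte-reversal mask) followed by one AES encryption round (AESENC). */
    static inline __m128i bswap16_then_aesenc(__m128i block, __m128i round_key)
    {
        const __m128i rev = _mm_setr_epi8(15, 14, 13, 12, 11, 10, 9, 8,
                                          7, 6, 5, 4, 3, 2, 1, 0);
        block = _mm_shuffle_epi8(block, rev);       /* "perform a 16 byte swap" */
        return _mm_aesenc_si128(block, round_key);  /* one AES round with round_key */
    }
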
aes_ctrby8_avx-x86_64.S
    136  var_xdata = %xmm\n
sha256-avx-asm.S
     72  # COPY_XMM_AND_BSWAP xmm, [mem], byte_flip_mask
     73  # Load xmm with mem and byte swap each dword

sha256-ssse3-asm.S
     65  # COPY_XMM_AND_BSWAP xmm, [mem], byte_flip_mask
     66  # Load xmm with mem and byte swap each dword
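
Both SHA-256 files document the same helper: COPY_XMM_AND_BSWAP loads a 16-byte block into an xmm register and byte-swaps each dword. A hedged equivalent written with SSE intrinsics (SSSE3 assumed; the mask value and function name are mine, not the kernel's):

    #include <immintrin.h>

    /* Illustrative equivalent of "Load xmm with mem and byte swap each dword". */
    static inline __m128i load_and_bswap_dwords(const void *mem)
    {
        /* byte_flip_mask: reverse the four bytes inside each 32-bit lane */
        const __m128i flip = _mm_setr_epi8(3, 2, 1, 0, 7, 6, 5, 4,
                                           11, 10, 9, 8, 15, 14, 13, 12);
        __m128i x = _mm_loadu_si128((const __m128i *)mem);
        return _mm_shuffle_epi8(x, flip);
    }
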
aesni-intel_avx-x86_64.S
     78  ## padded AAD in xmm register = {A1 A0 0 0}
     94  ## padded AAD in xmm register = {A2 A1 A0 0}
    242  reg_\r = %xmm\n
   1215  # clobbering all xmm registers
   2540  # clobbering all xmm registers
/Linux-v4.19/arch/x86/include/uapi/asm/
kvm.h
    170  __u8 xmm[16][16];  (member)
/Linux-v4.19/tools/arch/x86/include/uapi/asm/
kvm.h
    170  __u8 xmm[16][16];  (member)
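
Both copies of the uapi header expose the guest SSE state to user space as the xmm[16][16] member of struct kvm_fpu. A minimal user-space sketch of reading it through the KVM_GET_FPU ioctl, assuming a vCPU file descriptor has already been created (error handling trimmed; the helper name is hypothetical):

    #include <stdio.h>
    #include <sys/ioctl.h>
    #include <linux/kvm.h>

    /* Dump the 16 bytes of guest %xmm0; fpu.xmm[n] holds register %xmm<n>. */
    static void dump_guest_xmm0(int vcpu_fd)
    {
        struct kvm_fpu fpu;

        if (ioctl(vcpu_fd, KVM_GET_FPU, &fpu) < 0)
            return;
        for (int i = 0; i < 16; i++)
            printf("%02x", fpu.xmm[0][i]);
        printf("\n");
    }
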
/Linux-v4.19/arch/x86/kvm/
emulate.c
   1191  op->addr.xmm = reg;                               in decode_register_operand()
   1242  op->addr.xmm = ctxt->modrm_rm;                    in decode_modrm()
   1820  write_sse_reg(ctxt, &op->vec_val, op->addr.xmm);  in writeback()

x86.c
   8370  memcpy(fpu->xmm, fxsave->xmm_space, sizeof fxsave->xmm_space);  in kvm_arch_vcpu_ioctl_get_fpu()
   8391  memcpy(fxsave->xmm_space, fpu->xmm, sizeof fxsave->xmm_space);  in kvm_arch_vcpu_ioctl_set_fpu()
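
The two x86.c hits are the GET and SET halves of the same copy: 16 registers times 16 bytes moved between the kernel's fxsave image and the uapi struct above. A size-check sketch with stand-in types (fxsave_image and its u32 xmm_space[64] layout are assumptions here; only xmm[16][16] comes from the listing):

    #include <string.h>
    #include <stdint.h>

    /* Stand-in layouts, 256 bytes each: 16 XMM registers x 16 bytes. */
    struct fxsave_image { uint32_t xmm_space[64]; };  /* assumed fxsave-style area */
    struct kvm_fpu_xmm  { uint8_t  xmm[16][16];   };  /* matches the uapi member   */

    static void get_fpu_xmm(struct kvm_fpu_xmm *fpu, const struct fxsave_image *fx)
    {
        /* Same shape as the kvm_arch_vcpu_ioctl_get_fpu() line above. */
        memcpy(fpu->xmm, fx->xmm_space, sizeof(fx->xmm_space));
    }
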
/Linux-v4.19/Documentation/virtual/kvm/
api.txt
    609  __u8 xmm[16][16];
    635  __u8 xmm[16][16];