Lines matching the tokens "0" and "x66"
22 #define __FORCE_ORDER "m"(*(unsigned int *)0x1000UL)
29 asm volatile("mov %%cr0,%0\n\t" : "=r" (val) : __FORCE_ORDER); in native_read_cr0()
36 asm volatile("mov %%cr2,%0\n\t" : "=r" (val) : __FORCE_ORDER); in native_read_cr2()
42 asm volatile("mov %0,%%cr2": : "r" (val) : "memory"); in native_write_cr2()
48 asm volatile("mov %%cr3,%0\n\t" : "=r" (val) : __FORCE_ORDER); in __native_read_cr3()
54 asm volatile("mov %0,%%cr3": : "r" (val) : "memory"); in native_write_cr3()
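
The control-register matches above, which appear to come from the x86 special-instruction helpers, pair a read side with a write side. A sketch of the CR3 pair these fragments belong to, reconstructed from the matched lines (the function bodies outside the matches are filled in from context): the reads take the dummy __FORCE_ORDER "m" input defined at line 22, whose 0x1000UL address is never actually dereferenced and only hands the optimizer a fake dependency so control-register reads cannot be reordered past writes, while the writes need only a plain "memory" clobber.

    static inline unsigned long __native_read_cr3(void)
    {
            unsigned long val;

            /* Dummy "m" input keeps the read ordered against CR writes. */
            asm volatile("mov %%cr3,%0\n\t" : "=r" (val) : __FORCE_ORDER);
            return val;
    }

    static inline void native_write_cr3(unsigned long val)
    {
            /* The "memory" clobber alone is enough on the write side. */
            asm volatile("mov %0,%%cr3" : : "r" (val) : "memory");
    }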
63 * is functionally equivalent to CR4 == 0. Keep it simple and pretend in native_read_cr4()
64 * that CR4 == 0 on CPUs that don't have CR4. in native_read_cr4()
66 asm volatile("1: mov %%cr4, %0\n" in native_read_cr4()
69 : "=r" (val) : "0" (0), __FORCE_ORDER); in native_read_cr4()
72 asm volatile("mov %%cr4,%0\n\t" : "=r" (val) : __FORCE_ORDER); in native_read_cr4()
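
The native_read_cr4() matches at lines 66 and 69 skip the fixup lines in between. A sketch of how the read plausibly fits together: on 32-bit the MOV can fault on CPUs that predate CR4, so it sits under an exception-table entry, and val is preloaded with 0 via the "0" (0) input so a faulting read reports CR4 == 0, exactly as the comment at lines 63-64 describes. The 2: label and the _ASM_EXTABLE() line are assumptions filled in from that context.

    static inline unsigned long native_read_cr4(void)
    {
            unsigned long val;
    #ifdef CONFIG_X86_32
            /* May fault on CPUs without CR4; the fixup skips the MOV,
             * leaving the preloaded 0 in val. */
            asm volatile("1: mov %%cr4, %0\n"
                         "2:\n"
                         _ASM_EXTABLE(1b, 2b)
                         : "=r" (val) : "0" (0), __FORCE_ORDER);
    #else
            /* CR4 always exists on 64-bit. */
            asm volatile("mov %%cr4,%0\n\t" : "=r" (val) : __FORCE_ORDER);
    #endif
            return val;
    }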
82 u32 ecx = 0; in rdpkru()
87 * clears EDX and requires that ecx=0. in rdpkru()
89 asm volatile(".byte 0x0f,0x01,0xee\n\t" in rdpkru()
97 u32 ecx = 0, edx = 0; in wrpkru()
101 * requires that ecx = edx = 0. in wrpkru()
103 asm volatile(".byte 0x0f,0x01,0xef\n\t" in wrpkru()
110 return 0; in rdpkru()
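
The PKRU fragments at lines 82-103 hand-encode RDPKRU (0f 01 ee) and WRPKRU (0f 01 ef) as raw bytes. Reading the matches together, a sketch of the pair under the register contract the comments state (PKRU travels in EAX, ECX must be 0 for both, EDX is clobbered on read and must be 0 on write); the operand constraints are assumptions derived from that contract:

    static inline u32 rdpkru(void)
    {
            u32 ecx = 0;
            u32 edx, pkru;

            /* RDPKRU: places PKRU in EAX, clears EDX, requires ECX = 0 */
            asm volatile(".byte 0x0f,0x01,0xee\n\t"
                         : "=a" (pkru), "=d" (edx)
                         : "c" (ecx));
            return pkru;
    }

    static inline void wrpkru(u32 pkru)
    {
            u32 ecx = 0, edx = 0;

            /* WRPKRU: loads EAX into PKRU, requires ECX = EDX = 0 */
            asm volatile(".byte 0x0f,0x01,0xef\n\t"
                         : : "a" (pkru), "c" (ecx), "d" (edx));
    }

The bare `return 0;` at line 110 is presumably the rdpkru() stub used when protection keys are compiled out, so callers always see PKRU == 0.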
201 asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p)); in clflush()
206 alternative_io(".byte 0x3e; clflush %P0", in clflushopt()
207 ".byte 0x66; clflush %P0", in clflushopt()
217 ".byte 0x3e; clflush (%[pax])", in clwb()
218 ".byte 0x66; clflush (%[pax])", /* clflushopt (%%rax) */ in clwb()
220 ".byte 0x66, 0x0f, 0xae, 0x30", /* clwb (%%rax) */ in clwb()
231 asm volatile(".byte 0xf, 0x1, 0xe8" ::: "memory"); in serialize()
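
serialize() likewise emits SERIALIZE (opcode 0f 01 e8) as raw bytes, presumably so the file assembles on toolchains whose assembler predates the mnemonic. A sketch of the full helper as it plausibly reads:

    static inline void serialize(void)
    {
            /* SERIALIZE: architecturally serializes the instruction stream. */
            asm volatile(".byte 0xf, 0x1, 0xe8" ::: "memory");
    }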
251 asm volatile(".byte 0x66, 0x0f, 0x38, 0xf8, 0x02" in movdir64b()
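
MOVDIR64B (66 0f 38 f8 /r) with ModRM 0x02 is `movdir64b (%rdx), %rax`: it copies 64 bytes from the source address in %rdx to the destination address in %rax as a single 64-byte direct store. A sketch of the surrounding helper; the struct-wrapped memory operands are an assumption, there to tell the compiler the instruction touches 64 bytes rather than just the pointers:

    /* dst must be 64-byte aligned */
    static inline void movdir64b(void __iomem *dst, const void *src)
    {
            const struct { char _[64]; } *__src = src;
            struct { char _[64]; } __iomem *__dst = dst;

            /* movdir64b (%rdx), %rax: one 64-byte store from *src to *dst */
            asm volatile(".byte 0x66, 0x0f, 0x38, 0xf8, 0x02"
                         : "+m" (*__dst)
                         : "m" (*__src), "a" (__dst), "d" (__src));
    }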
264 * ZF = 0 equates to success, and ZF = 1 indicates retry or error.
269 * returns 0 on success and -EAGAIN on failure.
286 asm volatile(".byte 0xf3, 0x0f, 0x38, 0xf8, 0x02, 0x66, 0x90" in enqcmds()
295 return 0; in enqcmds()
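
Putting the enqcmds() matches together: ENQCMDS (f3 0f 38 f8 /r, here followed by a two-byte 66 90 NOP as padding) reports acceptance in EFLAGS.ZF, and the helper converts that flag into the 0/-EAGAIN return value the comment at line 269 promises. A sketch, assuming the kernel's CC_SET()/CC_OUT() flag-output helpers from <asm/asm.h>:

    static inline int enqcmds(void __iomem *dst, const void *src)
    {
            const struct { char _[64]; } *__src = src;
            struct { char _[64]; } __iomem *__dst = dst;
            bool zf;

            /* enqcmds (%rdx), %rax; ZF = 1 means the device rejected it */
            asm volatile(".byte 0xf3, 0x0f, 0x38, 0xf8, 0x02, 0x66, 0x90"
                         CC_SET(z)
                         : CC_OUT(z) (zf), "+m" (*__dst)
                         : "m" (*__src), "a" (__dst), "d" (__src));

            if (zf)
                    return -EAGAIN;  /* retry or error */

            return 0;                /* command accepted */
    }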
304 asm volatile(".byte 0xc4, 0xe2, 0x78, 0x49, 0xc0"); in tile_release()
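
The final match hand-encodes TILERELEASE, the AMX instruction that returns all tile registers to their initial state. The five bytes decode as a VEX-encoded instruction: c4 e2 78 selects the VEX.128.NP.0F38.W0 space, 49 is the opcode, and c0 the fixed ModRM. It is spelled out in .byte form, presumably for assemblers without AMX support:

    static inline void tile_release(void)
    {
            /* TILERELEASE: VEX.128.NP.0F38.W0 49 C0, resets all AMX tiles */
            asm volatile(".byte 0xc4, 0xe2, 0x78, 0x49, 0xc0");
    }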