/Linux-v6.1/tools/testing/selftests/kvm/x86_64/
set_sregs_test.c
    32  if (orig->cr4 & feature_bit)   in test_cr4_feature_bit()
    36  sregs.cr4 |= feature_bit;   in test_cr4_feature_bit()
    48  uint64_t cr4;   in calc_supported_cr4_feature_bits() local
    50  cr4 = X86_CR4_VME | X86_CR4_PVI | X86_CR4_TSD | X86_CR4_DE |   in calc_supported_cr4_feature_bits()
    54  cr4 |= X86_CR4_UMIP;   in calc_supported_cr4_feature_bits()
    56  cr4 |= X86_CR4_LA57;   in calc_supported_cr4_feature_bits()
    58  cr4 |= X86_CR4_VMXE;   in calc_supported_cr4_feature_bits()
    60  cr4 |= X86_CR4_SMXE;   in calc_supported_cr4_feature_bits()
    62  cr4 |= X86_CR4_FSGSBASE;   in calc_supported_cr4_feature_bits()
    64  cr4 |= X86_CR4_PCIDE;   in calc_supported_cr4_feature_bits()
    [all …]
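The calc_supported_cr4_feature_bits() hits above show the test building the set of CR4 bits a vCPU may legally be given: a fixed baseline of always-architectural bits, plus bits gated on CPUID features. A minimal sketch of that pattern follows; the X86_CR4_*/X86_FEATURE_* constants and the kvm_cpu_has() helper are assumed to come from the selftest headers, and only two of the feature checks are reproduced.

    /* Sketch only: accumulate the CR4 bits the guest is allowed to set.
     * X86_CR4_*, X86_FEATURE_* and kvm_cpu_has() are assumed from the KVM
     * selftest headers; the real test ORs in more baseline bits and checks
     * more features than shown here. */
    static uint64_t calc_supported_cr4_feature_bits(void)
    {
            uint64_t cr4;

            /* Baseline bits every test CPU is expected to support. */
            cr4 = X86_CR4_VME | X86_CR4_PVI | X86_CR4_TSD | X86_CR4_DE;

            /* Feature-dependent bits are offered only when CPUID reports them. */
            if (kvm_cpu_has(X86_FEATURE_UMIP))
                    cr4 |= X86_CR4_UMIP;
            if (kvm_cpu_has(X86_FEATURE_PCID))
                    cr4 |= X86_CR4_PCIDE;

            return cr4;
    }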
cr4_cpuid_sync_test.c
    24  uint64_t cr4 = get_cr4();   in cr4_cpuid_is_sync() local
    26  return (this_cpu_has(X86_FEATURE_OSXSAVE) == !!(cr4 & X86_CR4_OSXSAVE));   in cr4_cpuid_is_sync()
    31  uint64_t cr4;   in guest_code() local
    34  cr4 = get_cr4();   in guest_code()
    35  cr4 |= X86_CR4_OSXSAVE;   in guest_code()
    36  set_cr4(cr4);   in guest_code()
    78  sregs.cr4 &= ~X86_CR4_OSXSAVE;   in main()
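The check in cr4_cpuid_is_sync() is the heart of this test: after the guest flips CR4.OSXSAVE, the OSXSAVE bit reported by CPUID must track it. A sketch of that guest-side predicate, assuming the selftest's get_cr4()/this_cpu_has() helpers and the X86_CR4_OSXSAVE/X86_FEATURE_OSXSAVE definitions:

    /* Sketch: CPUID's OSXSAVE bit must mirror CR4.OSXSAVE at all times.
     * get_cr4(), this_cpu_has(), X86_FEATURE_OSXSAVE and X86_CR4_OSXSAVE
     * are assumed from the selftest headers. */
    static bool cr4_cpuid_is_sync(void)
    {
            uint64_t cr4 = get_cr4();

            return this_cpu_has(X86_FEATURE_OSXSAVE) == !!(cr4 & X86_CR4_OSXSAVE);
    }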
amx_test.c
    223  uint64_t cr4, xcr0;   in init_regs() local
    226  cr4 = get_cr4();   in init_regs()
    227  cr4 |= X86_CR4_OSXSAVE;   in init_regs()
    228  set_cr4(cr4);   in init_regs()
/Linux-v6.1/drivers/misc/lkdtm/
bugs.c
    461  unsigned long cr4;   in lkdtm_UNSET_SMEP() local
    464  cr4 = native_read_cr4();   in lkdtm_UNSET_SMEP()
    466  if ((cr4 & X86_CR4_SMEP) != X86_CR4_SMEP) {   in lkdtm_UNSET_SMEP()
    470  cr4 &= ~(X86_CR4_SMEP);   in lkdtm_UNSET_SMEP()
    473  native_write_cr4(cr4);   in lkdtm_UNSET_SMEP()
    474  if (cr4 == native_read_cr4()) {   in lkdtm_UNSET_SMEP()
    476  cr4 |= X86_CR4_SMEP;   in lkdtm_UNSET_SMEP()
    478  native_write_cr4(cr4);   in lkdtm_UNSET_SMEP()
    507  direct_write_cr4(cr4);   in lkdtm_UNSET_SMEP()
    512  cr4 |= X86_CR4_SMEP;   in lkdtm_UNSET_SMEP()
    [all …]
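lkdtm_UNSET_SMEP() exercises CR4 pinning: it clears CR4.SMEP through the normal, pinned write path and expects the bit to be forced back on, only later bypassing the pinning with direct_write_cr4(). A compressed sketch of the pinned-path probe, assuming the kernel's native_read_cr4()/native_write_cr4() and pr_info():

    #include <linux/printk.h>        /* pr_info() */
    #include <asm/special_insns.h>   /* native_read_cr4(), native_write_cr4() */
    #include <asm/processor-flags.h> /* X86_CR4_SMEP */

    /* Sketch only: try to drop CR4.SMEP via the pinned write path and report
     * whether CR4 pinning restored it, roughly as lkdtm_UNSET_SMEP() does. */
    static void try_unset_smep(void)
    {
            unsigned long cr4 = native_read_cr4();

            if ((cr4 & X86_CR4_SMEP) != X86_CR4_SMEP)
                    return;                 /* SMEP not enabled: nothing to probe */

            cr4 &= ~X86_CR4_SMEP;
            native_write_cr4(cr4);          /* the pinned path should refuse this */

            if (native_read_cr4() & X86_CR4_SMEP)
                    pr_info("CR4 pinning restored SMEP as expected\n");
    }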
/Linux-v6.1/arch/x86/include/asm/
tlbflush.h
    115  unsigned long cr4;   member
    166  this_cpu_write(cpu_tlbstate.cr4, __read_cr4());   in cr4_init_shadow()
    362  static inline void __native_tlb_flush_global(unsigned long cr4)   in __native_tlb_flush_global() argument
    364  native_write_cr4(cr4 ^ X86_CR4_PGE);   in __native_tlb_flush_global()
    365  native_write_cr4(cr4);   in __native_tlb_flush_global()
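__native_tlb_flush_global() above is the classic full TLB flush idiom: write CR4 with the PGE bit toggled, then write the original value back, which invalidates every translation including global ones. A self-contained sketch of that two-write pattern; the inline-asm writer below is a local stand-in for the kernel's native_write_cr4():

    /* Sketch of the PGE-toggle global TLB flush. X86_CR4_PGE is bit 7 of CR4;
     * write_cr4() here stands in for the kernel's native_write_cr4(). */
    #define X86_CR4_PGE     (1UL << 7)

    static inline void write_cr4(unsigned long val)
    {
            asm volatile("mov %0, %%cr4" : : "r" (val) : "memory");
    }

    static inline void tlb_flush_global(unsigned long cr4)
    {
            write_cr4(cr4 ^ X86_CR4_PGE);   /* flip PGE: flushes all TLB entries */
            write_cr4(cr4);                 /* restore the caller's CR4 value */
    }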
suspend_32.h
    20  unsigned long cr0, cr2, cr3, cr4;   member
/Linux-v6.1/arch/x86/power/
hibernate_asm_32.S
    55  jecxz 1f   # cr4 Pentium and higher, skip if zero
    57  movl %ecx, %cr4;   # turn off PGE
    89  jecxz 1f   # cr4 Pentium and higher, skip if zero
    90  movl %ecx, %cr4;   # turn PGE back on
hibernate_asm_64.S
    36  movq %rdx, %cr4;   # turn off PGE
    39  movq %rax, %cr4;   # turn PGE back on
    127  movq %rcx, %cr4;   # turn off PGE
    130  movq %rbx, %cr4;   # turn PGE back on
cpu.c
    125  ctxt->cr4 = __read_cr4();   in __save_processor_state()
    205  if (ctxt->cr4)   in __restore_processor_state()
    206  __write_cr4(ctxt->cr4);   in __restore_processor_state()
    210  __write_cr4(ctxt->cr4);   in __restore_processor_state()
/Linux-v6.1/arch/x86/kernel/
sev_verify_cbit.S
    35  movq %cr4, %rsi
    40  movq %rdx, %cr4
    71  movq %rsi, %cr4
relocate_kernel_64.S
    70  movq %cr4, %rax
    125  movq %cr4, %rax
    127  movq %rax, %cr4
    153  movq %rax, %cr4
    237  movq %rax, %cr4
process_32.c
    62  unsigned long cr0 = 0L, cr2 = 0L, cr3 = 0L, cr4 = 0L;   in __show_regs() local
    83  cr4 = __read_cr4();   in __show_regs()
    85  log_lvl, cr0, cr2, cr3, cr4);   in __show_regs()
head_64.S
    184  movq %cr4, %rcx
    198  movq %rcx, %cr4
    230  movq %cr4, %rcx
    233  movq %rcx, %cr4
    234  movq %rax, %cr4
process.c
    643  unsigned long newval, cr4 = this_cpu_read(cpu_tlbstate.cr4);   in cr4_toggle_bits_irqsoff() local
    645  newval = cr4 ^ mask;   in cr4_toggle_bits_irqsoff()
    646  if (newval != cr4) {   in cr4_toggle_bits_irqsoff()
    647  this_cpu_write(cpu_tlbstate.cr4, newval);   in cr4_toggle_bits_irqsoff()
relocate_kernel_32.S
    56  movl %cr4, %eax
    129  movl %eax, %cr4
    207  movl %eax, %cr4
/Linux-v6.1/arch/x86/platform/pvh/
head.S
    74  mov %cr4, %eax
    76  mov %eax, %cr4
    133  mov %cr4, %eax
    135  mov %eax, %cr4
/Linux-v6.1/arch/x86/kernel/cpu/mtrr/
cyrix.c
    135  static u32 cr4, ccr3;   variable
    143  cr4 = __read_cr4();   in prepare_set()
    144  __write_cr4(cr4 & ~X86_CR4_PGE);   in prepare_set()
    176  __write_cr4(cr4);   in post_set()
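prepare_set() and post_set() bracket the Cyrix MTRR update: CR4 is saved, PGE is cleared (which also flushes global TLB entries) while the ranges are reprogrammed, and the saved value is written back afterwards. A sketch of that bracket, assuming the kernel's __read_cr4()/__write_cr4() and X86_CR4_PGE; the CCR3 and cache manipulation the real driver also performs is omitted:

    /* Sketch only: save CR4, run the MTRR update with PGE cleared, restore.
     * __read_cr4(), __write_cr4() and X86_CR4_PGE are the kernel helpers
     * assumed here; the real prepare_set()/post_set() also juggle CCR3 and
     * the caches. */
    static unsigned long saved_cr4;

    static void prepare_set(void)
    {
            saved_cr4 = __read_cr4();
            __write_cr4(saved_cr4 & ~X86_CR4_PGE);  /* drop PGE, flushing the TLB */
    }

    static void post_set(void)
    {
            __write_cr4(saved_cr4);                 /* restore CR4, PGE included */
    }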
/Linux-v6.1/arch/x86/mm/
mem_encrypt_boot.S
    102  mov %cr4, %rdx
    104  mov %rdx, %cr4
    106  mov %rdx, %cr4
/Linux-v6.1/include/xen/interface/hvm/
hvm_vcpu.h
    25  uint32_t cr4;   member
    88  uint64_t cr4;   member
/Linux-v6.1/tools/testing/selftests/kvm/lib/x86_64/
vmx.c
    133  unsigned long cr4;   in prepare_for_vmx_operation() local
    145  __asm__ __volatile__("mov %%cr4, %0" : "=r"(cr4) : : "memory");   in prepare_for_vmx_operation()
    146  cr4 &= rdmsr(MSR_IA32_VMX_CR4_FIXED1);   in prepare_for_vmx_operation()
    147  cr4 |= rdmsr(MSR_IA32_VMX_CR4_FIXED0);   in prepare_for_vmx_operation()
    149  cr4 |= X86_CR4_VMXE;   in prepare_for_vmx_operation()
    150  __asm__ __volatile__("mov %0, %%cr4" : : "r"(cr4) : "memory");   in prepare_for_vmx_operation()
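prepare_for_vmx_operation() adjusts CR4 as the SDM requires before VMXON: bits that are 0 in IA32_VMX_CR4_FIXED1 must be cleared, bits that are 1 in IA32_VMX_CR4_FIXED0 must be set, and CR4.VMXE must be on. A sketch of just that adjustment, assuming the selftest's rdmsr() helper and the MSR_IA32_VMX_CR4_FIXED0/1 and X86_CR4_VMXE definitions:

    /* Sketch only: make CR4 acceptable for VMXON. rdmsr(), the two FIXED MSR
     * constants and X86_CR4_VMXE are assumed from the selftest headers. */
    static void adjust_cr4_for_vmxon(void)
    {
            unsigned long cr4;

            __asm__ __volatile__("mov %%cr4, %0" : "=r"(cr4) : : "memory");
            cr4 &= rdmsr(MSR_IA32_VMX_CR4_FIXED1);  /* clear bits that must be 0 */
            cr4 |= rdmsr(MSR_IA32_VMX_CR4_FIXED0);  /* set bits that must be 1  */
            cr4 |= X86_CR4_VMXE;                    /* VMXON requires CR4.VMXE=1 */
            __asm__ __volatile__("mov %0, %%cr4" : : "r"(cr4) : "memory");
    }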
/Linux-v6.1/arch/x86/platform/olpc/
xo1-wakeup.S
    30  movl %eax, %cr4
    64  movl %cr4, %edx
/Linux-v6.1/arch/x86/kernel/cpu/
common.c
    464  unsigned long newval, cr4 = this_cpu_read(cpu_tlbstate.cr4);   in cr4_update_irqsoff() local
    468  newval = (cr4 & ~clear) | set;   in cr4_update_irqsoff()
    469  if (newval != cr4) {   in cr4_update_irqsoff()
    470  this_cpu_write(cpu_tlbstate.cr4, newval);   in cr4_update_irqsoff()
    479  return this_cpu_read(cpu_tlbstate.cr4);   in cr4_read_shadow()
    485  unsigned long cr4 = __read_cr4();   in cr4_init() local
    488  cr4 |= X86_CR4_PCIDE;   in cr4_init()
    490  cr4 = (cr4 & ~cr4_pinned_mask) | cr4_pinned_bits;   in cr4_init()
    492  __write_cr4(cr4);   in cr4_init()
    495  this_cpu_write(cpu_tlbstate.cr4, cr4);   in cr4_init()
    [all …]
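cr4_update_irqsoff() and cr4_init() show the kernel's shadowed-CR4 scheme: a per-CPU copy in cpu_tlbstate.cr4 lets readers avoid touching the hardware register, and the register is rewritten only when the requested bits actually change. A simplified single-CPU sketch of the update path; the plain static variable and write_cr4() below stand in for the per-CPU shadow and __write_cr4():

    /* Sketch of the shadowed-CR4 update. A plain static stands in for the
     * per-CPU cpu_tlbstate.cr4, and write_cr4() for __write_cr4(); interrupts
     * are assumed to be off, as the _irqsoff suffix implies. */
    static unsigned long cr4_shadow;

    static void write_cr4(unsigned long val)
    {
            asm volatile("mov %0, %%cr4" : : "r" (val) : "memory");
    }

    static void cr4_update_irqsoff(unsigned long set, unsigned long clear)
    {
            unsigned long cr4 = cr4_shadow;
            unsigned long newval = (cr4 & ~clear) | set;

            if (newval != cr4) {            /* skip the CR4 write if nothing changed */
                    cr4_shadow = newval;    /* update the shadow first, then hardware */
                    write_cr4(newval);
            }
    }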
/Linux-v6.1/arch/x86/boot/compressed/
head_64.S
    172  movl %cr4, %eax
    174  movl %eax, %cr4
    632  movl %cr4, %eax
    638  movl %cr4, %eax
    666  movl %cr4, %eax
    678  movl %eax, %cr4
efi_thunk_64.S
    152  movl %cr4, %eax
    154  movl %eax, %cr4
/Linux-v6.1/arch/x86/kvm/svm/
nested.c
    92  kvm_init_shadow_npt_mmu(vcpu, X86_CR0_PG, svm->vmcb01.ptr->save.cr4,   in nested_svm_init_mmu_context()
    300  if (CC(!(save->cr4 & X86_CR4_PAE)) ||   in __nested_vmcb_check_save()
    307  if (CC(!__kvm_is_valid_cr4(vcpu, save->cr4)))   in __nested_vmcb_check_save()
    393  to->cr4 = from->cr4;   in __nested_copy_vmcb_save_to_cache()
    562  svm_set_cr4(&svm->vcpu, svm->nested.save.cr4);   in nested_vmcb02_prepare_save()
    859  vmcb01->save.cr4 = vcpu->arch.cr4;   in nested_svm_vmrun()
    906  to_save->cr4 = from_save->cr4;   in svm_copy_vmrun_state()
    969  vmcb12->save.cr4 = svm->vcpu.arch.cr4;   in nested_svm_vmexit()
    1040  svm_set_cr4(vcpu, vmcb01->save.cr4);   in nested_svm_vmexit()