/Linux-v5.15/tools/testing/selftests/kvm/x86_64/ |
D | set_sregs_test.c |
    34   if (orig->cr4 & feature_bit)                                  in test_cr4_feature_bit()
    38   sregs.cr4 |= feature_bit;                                     in test_cr4_feature_bit()
    51   uint64_t cr4;                                                 in calc_cr4_feature_bits() local
    56   cr4 = X86_CR4_VME | X86_CR4_PVI | X86_CR4_TSD | X86_CR4_DE |  in calc_cr4_feature_bits()
    60   cr4 |= X86_CR4_UMIP;                                          in calc_cr4_feature_bits()
    62   cr4 |= X86_CR4_LA57;                                          in calc_cr4_feature_bits()
    64   cr4 |= X86_CR4_VMXE;                                          in calc_cr4_feature_bits()
    66   cr4 |= X86_CR4_SMXE;                                          in calc_cr4_feature_bits()
    68   cr4 |= X86_CR4_FSGSBASE;                                      in calc_cr4_feature_bits()
    70   cr4 |= X86_CR4_PCIDE;                                         in calc_cr4_feature_bits()
    [all …]
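
calc_cr4_feature_bits() above starts from a baseline of always-available CR4 bits and ORs in the optional ones only when CPUID advertises the matching feature. A rough user-space sketch of the same idea, using GCC's <cpuid.h>; the helper name and the abbreviated bit selection are ours, not the selftest's:

#include <cpuid.h>
#include <stdint.h>
#include <stdio.h>

/* CR4 bit positions (Intel SDM, Vol. 3A). */
#define X86_CR4_DE        (1ul << 3)
#define X86_CR4_UMIP      (1ul << 11)
#define X86_CR4_FSGSBASE  (1ul << 16)
#define X86_CR4_PCIDE     (1ul << 17)

/* Hypothetical helper: which optional CR4 bits this CPU could legally set. */
static uint64_t calc_supported_cr4_bits(void)
{
        uint64_t cr4 = X86_CR4_DE;      /* abbreviated baseline */
        unsigned int eax, ebx, ecx, edx;

        if (__get_cpuid(1, &eax, &ebx, &ecx, &edx) && (ecx & (1u << 17)))
                cr4 |= X86_CR4_PCIDE;                   /* CPUID.1:ECX.PCID */

        if (__get_cpuid_count(7, 0, &eax, &ebx, &ecx, &edx)) {
                if (ebx & (1u << 0))
                        cr4 |= X86_CR4_FSGSBASE;        /* CPUID.7.0:EBX.FSGSBASE */
                if (ecx & (1u << 2))
                        cr4 |= X86_CR4_UMIP;            /* CPUID.7.0:ECX.UMIP */
        }
        return cr4;
}

int main(void)
{
        printf("CR4 feature bits this CPU could enable: 0x%llx\n",
               (unsigned long long)calc_supported_cr4_bits());
        return 0;
}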
|
D | cr4_cpuid_sync_test.c |
    30   uint64_t cr4;                                                            in cr4_cpuid_is_sync() local
    38   cr4 = get_cr4();                                                         in cr4_cpuid_is_sync()
    40   return (!!(ecx & X86_FEATURE_OSXSAVE)) == (!!(cr4 & X86_CR4_OSXSAVE));   in cr4_cpuid_is_sync()
    45   uint64_t cr4;                                                            in guest_code() local
    48   cr4 = get_cr4();                                                         in guest_code()
    49   cr4 |= X86_CR4_OSXSAVE;                                                  in guest_code()
    50   set_cr4(cr4);                                                            in guest_code()
    99   sregs.cr4 &= ~X86_CR4_OSXSAVE;                                           in main()
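
cr4_cpuid_is_sync() and guest_code() above exercise the architectural rule that CPUID.1:ECX.OSXSAVE must mirror CR4.OSXSAVE once the bit is set. A minimal sketch of that comparison; reading CR4 requires CPL 0, so this only makes sense as kernel or KVM guest code, and the helper names here are ours:

#include <stdbool.h>
#include <stdint.h>

#define X86_CR4_OSXSAVE      (1ul << 18)
#define CPUID_1_ECX_OSXSAVE  (1u << 27)

/* CPL-0 only: read CR4 with inline asm (mirrors the selftest's get_cr4()). */
static inline uint64_t get_cr4(void)
{
        uint64_t cr4;

        __asm__ __volatile__("mov %%cr4, %0" : "=r"(cr4));
        return cr4;
}

/* ECX of CPUID leaf 1, where OSXSAVE is reported. */
static inline uint32_t cpuid_1_ecx(void)
{
        uint32_t eax, ebx, ecx, edx;

        __asm__ __volatile__("cpuid"
                             : "=a"(eax), "=b"(ebx), "=c"(ecx), "=d"(edx)
                             : "a"(1), "c"(0));
        return ecx;
}

/* True when the CPUID view of OSXSAVE matches the CR4.OSXSAVE bit. */
static bool cr4_cpuid_is_sync(void)
{
        return !!(cpuid_1_ecx() & CPUID_1_ECX_OSXSAVE) ==
               !!(get_cr4() & X86_CR4_OSXSAVE);
}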
|
/Linux-v5.15/drivers/misc/lkdtm/ |
D | bugs.c |
    374   unsigned long cr4;                            in lkdtm_UNSET_SMEP() local
    377   cr4 = native_read_cr4();                      in lkdtm_UNSET_SMEP()
    379   if ((cr4 & X86_CR4_SMEP) != X86_CR4_SMEP) {   in lkdtm_UNSET_SMEP()
    383   cr4 &= ~(X86_CR4_SMEP);                       in lkdtm_UNSET_SMEP()
    386   native_write_cr4(cr4);                        in lkdtm_UNSET_SMEP()
    387   if (cr4 == native_read_cr4()) {               in lkdtm_UNSET_SMEP()
    389   cr4 |= X86_CR4_SMEP;                          in lkdtm_UNSET_SMEP()
    391   native_write_cr4(cr4);                        in lkdtm_UNSET_SMEP()
    420   direct_write_cr4(cr4);                        in lkdtm_UNSET_SMEP()
    425   cr4 |= X86_CR4_SMEP;                          in lkdtm_UNSET_SMEP()
    [all …]
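
lkdtm_UNSET_SMEP() tries to clear CR4.SMEP through native_write_cr4() and expects CR4 pinning to put the bit straight back. The sketch below models both sides of that in plain user-space C, with a software "register" standing in for CR4 and a writer patterned on the kernel's pinning check; the names and initial values are illustrative:

#include <stdio.h>

#define X86_CR4_SMEP    (1ul << 20)
#define X86_CR4_SMAP    (1ul << 21)

/* Software model of the register plus the pin mask/bits. */
static unsigned long cr4_model = X86_CR4_SMEP | X86_CR4_SMAP;
static const unsigned long cr4_pinned_mask = X86_CR4_SMEP | X86_CR4_SMAP;
static unsigned long cr4_pinned_bits = X86_CR4_SMEP | X86_CR4_SMAP;

static unsigned long read_cr4_model(void)
{
        return cr4_model;
}

/* Modeled after the kernel's pinning: if a write drops a pinned bit, put it back. */
static void write_cr4_model(unsigned long val)
{
        unsigned long bits_changed = (val & cr4_pinned_mask) ^ cr4_pinned_bits;

        if (bits_changed) {
                val |= cr4_pinned_bits;         /* re-assert the pinned bits */
                printf("pinned CR4 bits changed: 0x%lx, restoring\n", bits_changed);
        }
        cr4_model = val;
}

/* The lkdtm-style probe: try to clear SMEP and see whether it sticks. */
int main(void)
{
        unsigned long cr4 = read_cr4_model();

        write_cr4_model(cr4 & ~X86_CR4_SMEP);

        if (read_cr4_model() & X86_CR4_SMEP)
                printf("ok: SMEP came back, pinning held\n");
        else
                printf("FAIL: SMEP stayed cleared\n");
        return 0;
}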
|
/Linux-v5.15/arch/x86/power/ |
D | hibernate_asm_32.S |
    55   jecxz 1f                # cr4 Pentium and higher, skip if zero
    57   movl %ecx, %cr4;        # turn off PGE
    89   jecxz 1f                # cr4 Pentium and higher, skip if zero
    90   movl %ecx, %cr4;        # turn PGE back on
|
D | hibernate_asm_64.S |
    36   movq %rdx, %cr4;        # turn off PGE
    39   movq %rax, %cr4;        # turn PGE back on
    127  movq %rcx, %cr4;        # turn off PGE
    130  movq %rbx, %cr4;        # turn PGE back on
|
D | cpu.c |
    123  ctxt->cr4 = __read_cr4();   in __save_processor_state()
    203  if (ctxt->cr4)              in __restore_processor_state()
    204  __write_cr4(ctxt->cr4);     in __restore_processor_state()
    208  __write_cr4(ctxt->cr4);     in __restore_processor_state()
|
/Linux-v5.15/arch/x86/kernel/ |
D | sev_verify_cbit.S |
    35   movq %cr4, %rsi
    40   movq %rdx, %cr4
    71   movq %rsi, %cr4
|
D | process_32.c |
    62   unsigned long cr0 = 0L, cr2 = 0L, cr3 = 0L, cr4 = 0L;   in __show_regs() local
    86   cr4 = __read_cr4();                                     in __show_regs()
    88   log_lvl, cr0, cr2, cr3, cr4);                           in __show_regs()
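
__show_regs() gathers CR0, CR2, CR3 and CR4 only to print them in the register dump. A bare sketch of that gathering step; the mov-from-CR instructions fault with #GP outside ring 0, so this is kernel-context code, and printf stands in for printk:

#include <stdio.h>

/* CPL-0 only: each control-register read faults with #GP at CPL > 0. */
static inline unsigned long read_cr0(void)
{
        unsigned long val;
        __asm__ __volatile__("mov %%cr0, %0" : "=r"(val));
        return val;
}

static inline unsigned long read_cr2(void)
{
        unsigned long val;
        __asm__ __volatile__("mov %%cr2, %0" : "=r"(val));
        return val;
}

static inline unsigned long read_cr3(void)
{
        unsigned long val;
        __asm__ __volatile__("mov %%cr3, %0" : "=r"(val));
        return val;
}

static inline unsigned long read_cr4(void)
{
        unsigned long val;
        __asm__ __volatile__("mov %%cr4, %0" : "=r"(val));
        return val;
}

/* Dump the control registers the way a register-dump path might. */
static void show_control_regs(void)
{
        printf("cr0: %lx cr2: %lx cr3: %lx cr4: %lx\n",
               read_cr0(), read_cr2(), read_cr3(), read_cr4());
}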
|
D | relocate_kernel_64.S |
    68   movq %cr4, %rax
    141  movq %rax, %cr4
    220  movq %rax, %cr4
|
D | relocate_kernel_32.S |
    54   movl %cr4, %eax
    125  movl %eax, %cr4
    198  movl %eax, %cr4
|
D | process.c |
    626  unsigned long newval, cr4 = this_cpu_read(cpu_tlbstate.cr4);   in cr4_toggle_bits_irqsoff() local
    628  newval = cr4 ^ mask;                                           in cr4_toggle_bits_irqsoff()
    629  if (newval != cr4) {                                           in cr4_toggle_bits_irqsoff()
    630  this_cpu_write(cpu_tlbstate.cr4, newval);                      in cr4_toggle_bits_irqsoff()
|
/Linux-v5.15/arch/x86/platform/pvh/ |
D | head.S |
    73   mov %cr4, %eax
    75   mov %eax, %cr4
    132  mov %cr4, %eax
    134  mov %eax, %cr4
|
/Linux-v5.15/arch/x86/kernel/cpu/mtrr/ |
D | cyrix.c |
    135  static u32 cr4, ccr3;              variable
    143  cr4 = __read_cr4();                in prepare_set()
    144  __write_cr4(cr4 & ~X86_CR4_PGE);   in prepare_set()
    176  __write_cr4(cr4);                  in post_set()
|
/Linux-v5.15/arch/x86/mm/ |
D | mem_encrypt_boot.S |
    99   mov %cr4, %rdx
    101  mov %rdx, %cr4
    103  mov %rdx, %cr4
|
D | tlb.c |
    1151  unsigned long cr4, flags;                in native_flush_tlb_global() local
    1171  cr4 = this_cpu_read(cpu_tlbstate.cr4);   in native_flush_tlb_global()
    1173  native_write_cr4(cr4 ^ X86_CR4_PGE);     in native_flush_tlb_global()
    1175  native_write_cr4(cr4);                   in native_flush_tlb_global()
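
native_flush_tlb_global() relies on the architectural rule that any CR4 write which changes PGE flushes the whole TLB, global entries included; it toggles the bit and then writes the original value back, taking the current value from the per-CPU cpu_tlbstate shadow. A stripped-down sketch of just the toggle (ring-0 only, interrupts off, and without the shadow bookkeeping the real function does):

#include <stdint.h>

#define X86_CR4_PGE     (1ul << 7)

static inline unsigned long read_cr4_raw(void)
{
        unsigned long cr4;

        __asm__ __volatile__("mov %%cr4, %0" : "=r"(cr4));
        return cr4;
}

static inline void write_cr4_raw(unsigned long cr4)
{
        __asm__ __volatile__("mov %0, %%cr4" : : "r"(cr4) : "memory");
}

/*
 * CPL-0 only, interrupts disabled in real code: a CR4 write that changes
 * PGE invalidates all TLB entries, including global ones (SDM Vol. 3A),
 * and writing the original value back restores global-page support.
 */
static void flush_tlb_global_sketch(void)
{
        unsigned long cr4 = read_cr4_raw();

        write_cr4_raw(cr4 ^ X86_CR4_PGE);       /* toggle PGE: full flush */
        write_cr4_raw(cr4);                     /* restore the original CR4 */
}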
|
/Linux-v5.15/include/xen/interface/hvm/ |
D | hvm_vcpu.h |
    42   uint32_t cr4;    member
    105  uint64_t cr4;    member
|
/Linux-v5.15/arch/x86/kernel/cpu/ |
D | common.c |
    408  unsigned long newval, cr4 = this_cpu_read(cpu_tlbstate.cr4);   in cr4_update_irqsoff() local
    412  newval = (cr4 & ~clear) | set;                                 in cr4_update_irqsoff()
    413  if (newval != cr4) {                                           in cr4_update_irqsoff()
    414  this_cpu_write(cpu_tlbstate.cr4, newval);                      in cr4_update_irqsoff()
    423  return this_cpu_read(cpu_tlbstate.cr4);                        in cr4_read_shadow()
    429  unsigned long cr4 = __read_cr4();                              in cr4_init() local
    432  cr4 |= X86_CR4_PCIDE;                                          in cr4_init()
    434  cr4 = (cr4 & ~cr4_pinned_mask) | cr4_pinned_bits;              in cr4_init()
    436  __write_cr4(cr4);                                              in cr4_init()
    439  this_cpu_write(cpu_tlbstate.cr4, cr4);                         in cr4_init()
    [all …]
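
cr4_update_irqsoff() and cr4_read_shadow() work against a per-CPU software copy of CR4, so reads are cheap and the register is only written when the requested clear/set actually changes the value; cr4_init() seeds that shadow after forcing the pinned bits. A user-space model of the shadow logic, with a plain variable standing in for the per-CPU shadow and a printf stub for the privileged write; both are stand-ins, not kernel APIs:

#include <stdio.h>

#define X86_CR4_PCIDE   (1ul << 17)
#define X86_CR4_SMEP    (1ul << 20)
#define X86_CR4_SMAP    (1ul << 21)

/* Software copy of CR4 (one variable here; the kernel keeps one per CPU). */
static unsigned long cr4_shadow;

/* Stub for the privileged register write, so the model runs anywhere. */
static void hw_write_cr4(unsigned long val)
{
        printf("mov %%cr4 <- 0x%lx\n", val);
}

static unsigned long cr4_read_shadow(void)
{
        return cr4_shadow;
}

/* Clear and set bits in CR4, hitting the register only when the value changes. */
static void cr4_update(unsigned long clear, unsigned long set)
{
        unsigned long cr4 = cr4_shadow;
        unsigned long newval = (cr4 & ~clear) | set;

        if (newval != cr4) {
                cr4_shadow = newval;
                hw_write_cr4(newval);
        }
}

int main(void)
{
        cr4_shadow = X86_CR4_PCIDE;                     /* pretend cr4_init() seeded this */
        cr4_update(0, X86_CR4_SMEP | X86_CR4_SMAP);     /* value changes: write */
        cr4_update(0, X86_CR4_SMEP);                    /* already set: no write */
        printf("shadow = 0x%lx\n", cr4_read_shadow());
        return 0;
}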
|
/Linux-v5.15/tools/testing/selftests/kvm/include/x86_64/ |
D | processor.h |
    236  uint64_t cr4;                       in get_cr4() local
    239  : /* output */ [cr4]"=r"(cr4));     in get_cr4()
    240  return cr4;                         in get_cr4()
|
/Linux-v5.15/arch/x86/include/asm/ |
D | tlbflush.h |
    114  unsigned long cr4;                                member
    165  this_cpu_write(cpu_tlbstate.cr4, __read_cr4());   in cr4_init_shadow()
|
D | suspend_32.h |
    20   unsigned long cr0, cr2, cr3, cr4;   member
|
D | suspend_64.h |
    37   unsigned long cr0, cr2, cr3, cr4;   member
|
/Linux-v5.15/tools/testing/selftests/kvm/lib/x86_64/ |
D | vmx.c |
    136  unsigned long cr4;                                                in prepare_for_vmx_operation() local
    148  __asm__ __volatile__("mov %%cr4, %0" : "=r"(cr4) : : "memory");  in prepare_for_vmx_operation()
    149  cr4 &= rdmsr(MSR_IA32_VMX_CR4_FIXED1);                           in prepare_for_vmx_operation()
    150  cr4 |= rdmsr(MSR_IA32_VMX_CR4_FIXED0);                           in prepare_for_vmx_operation()
    152  cr4 |= X86_CR4_VMXE;                                             in prepare_for_vmx_operation()
    153  __asm__ __volatile__("mov %0, %%cr4" : : "r"(cr4) : "memory");   in prepare_for_vmx_operation()
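
prepare_for_vmx_operation() shapes CR4 into a value VMXON will accept: bits that MSR_IA32_VMX_CR4_FIXED1 reports as disallowed must be clear, bits that MSR_IA32_VMX_CR4_FIXED0 requires must be set, and CR4.VMXE must be on. A condensed sketch of that adjustment; RDMSR and the CR4 write are ring-0 only, and the rdmsr() wrapper here is our own rather than the selftest library's:

#include <stdint.h>

#define X86_CR4_VMXE            (1ul << 13)
#define MSR_IA32_VMX_CR4_FIXED0 0x488
#define MSR_IA32_VMX_CR4_FIXED1 0x489

/* CPL-0 only: RDMSR wrapper. */
static inline uint64_t rdmsr(uint32_t msr)
{
        uint32_t lo, hi;

        __asm__ __volatile__("rdmsr" : "=a"(lo), "=d"(hi) : "c"(msr));
        return ((uint64_t)hi << 32) | lo;
}

/* Adjust CR4 so that VMXON is architecturally allowed, then enable VMXE. */
static void set_cr4_for_vmx(void)
{
        unsigned long cr4;

        __asm__ __volatile__("mov %%cr4, %0" : "=r"(cr4));
        cr4 &= rdmsr(MSR_IA32_VMX_CR4_FIXED1);  /* drop bits the CPU requires to be 0 */
        cr4 |= rdmsr(MSR_IA32_VMX_CR4_FIXED0);  /* force bits the CPU requires to be 1 */
        cr4 |= X86_CR4_VMXE;                    /* required before VMXON */
        __asm__ __volatile__("mov %0, %%cr4" : : "r"(cr4) : "memory");
}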
|
/Linux-v5.15/arch/x86/platform/olpc/ |
D | xo1-wakeup.S |
    30   movl %eax, %cr4
    64   movl %cr4, %edx
|
/Linux-v5.15/arch/x86/boot/compressed/ |
D | efi_thunk_64.S |
    144  movl %cr4, %eax
    146  movl %eax, %cr4
|
/Linux-v5.15/arch/x86/kvm/svm/ |
D | nested.c |
    107  kvm_init_shadow_npt_mmu(vcpu, X86_CR0_PG, svm->vmcb01.ptr->save.cr4,   in nested_svm_init_mmu_context()
    272  if (CC(!(save->cr4 & X86_CR4_PAE)) ||                                  in nested_vmcb_check_cr3_cr4()
    278  if (CC(!kvm_is_valid_cr4(vcpu, save->cr4)))                            in nested_vmcb_check_cr3_cr4()
    486  svm_set_cr4(&svm->vcpu, vmcb12->save.cr4);                             in nested_vmcb02_prepare_save()
    677  svm->vmcb01.ptr->save.cr4 = vcpu->arch.cr4;                            in nested_svm_vmrun()
    722  to_save->cr4 = from_save->cr4;                                         in svm_copy_vmrun_state()
    787  vmcb12->save.cr4 = svm->vcpu.arch.cr4;                                 in nested_svm_vmexit()
    843  svm_set_cr4(vcpu, svm->vmcb->save.cr4);                                in nested_svm_vmexit()
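
nested_vmcb_check_cr3_cr4() refuses a VMRUN whose vmcb12 enables long-mode paging without CR4.PAE, and it also passes the guest-supplied CR4 through kvm_is_valid_cr4()'s reserved-bit check. A simplified host-side sketch of those two checks; the struct and the reserved-bit mask below are illustrative, not KVM's definitions:

#include <stdbool.h>
#include <stdint.h>

#define X86_CR0_PG      (1ull << 31)
#define X86_CR4_PAE     (1ull << 5)
#define X86_CR4_VMXE    (1ull << 13)
#define EFER_LME        (1ull << 8)

/* Illustrative slice of a vmcb save area: only the fields the check needs. */
struct vmcb_save_sketch {
        uint64_t efer;
        uint64_t cr0;
        uint64_t cr4;
};

/* Bits this (hypothetical) vCPU model never lets a guest set. */
static const uint64_t cr4_reserved_bits = X86_CR4_VMXE;

static bool cr4_is_valid(uint64_t cr4)
{
        return (cr4 & cr4_reserved_bits) == 0;
}

/* Long-mode paging requires CR4.PAE; reserved CR4 bits must stay clear. */
static bool nested_cr4_checks_pass(const struct vmcb_save_sketch *save)
{
        if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG) &&
            !(save->cr4 & X86_CR4_PAE))
                return false;

        return cr4_is_valid(save->cr4);
}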
|