
Searched refs: efer (Results 1 – 23 of 23) sorted by relevance

/Linux-v5.4/arch/x86/include/asm/
virtext.h
    112  uint64_t efer;  in cpu_svm_disable() local
    115  rdmsrl(MSR_EFER, efer);  in cpu_svm_disable()
    116  wrmsrl(MSR_EFER, efer & ~EFER_SVME);  in cpu_svm_disable()
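
A minimal sketch of what these three hits add up to (not the verbatim v5.4 source): read EFER, clear SVME, write it back, so the CPU drops out of SVM operation. rdmsrl()/wrmsrl() are the kernel's MSR accessors from <asm/msr.h>.

    static inline void cpu_svm_disable(void)
    {
            uint64_t efer;

            rdmsrl(MSR_EFER, efer);                 /* current EFER */
            wrmsrl(MSR_EFER, efer & ~EFER_SVME);    /* leave SVM operation */
    }
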
suspend_64.h
    41  unsigned long efer;  member
realmode.h
    48  u64 efer;
svm.h
    175  u64 efer;  member
kvm_host.h
    555  u64 efer;  member
    1049  void (*set_efer)(struct kvm_vcpu *vcpu, u64 efer);
    1359  bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer);
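
For reference while reading the snippets that follow: the EFER_* masks tested everywhere are the architectural EFER bits. The kernel's own definitions live in msr-index.h (not among these hits); the values below are a sketch of the architectural layout, not copied from that file.

    #define MSR_EFER    0xc0000080      /* Extended Feature Enable Register */
    #define EFER_SCE    (1ULL << 0)     /* SYSCALL/SYSRET enable */
    #define EFER_LME    (1ULL << 8)     /* long mode enable */
    #define EFER_LMA    (1ULL << 10)    /* long mode active (status bit) */
    #define EFER_NX     (1ULL << 11)    /* no-execute page protection */
    #define EFER_SVME   (1ULL << 12)    /* AMD SVM enable */
    #define EFER_FFXSR  (1ULL << 14)    /* fast FXSAVE/FXRSTOR */
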
/Linux-v5.4/arch/x86/realmode/
init.c
    50  u64 efer;  in setup_real_mode() local
    97  rdmsrl(MSR_EFER, efer);  in setup_real_mode()
    98  trampoline_header->efer = efer & ~EFER_LMA;  in setup_real_mode()
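
What the setup_real_mode() hits amount to, sketched under the assumption that trampoline_header is the 64-bit trampoline header declared in realmode.h above: the host EFER is handed to the trampoline with the LMA status bit cleared, because the trampoline re-enters long mode on its own.

    u64 efer;

    rdmsrl(MSR_EFER, efer);
    /* LMA is a status bit; the trampoline sets it again when it
     * re-enables paging and long mode. */
    trampoline_header->efer = efer & ~EFER_LMA;
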
/Linux-v5.4/include/xen/interface/hvm/
hvm_vcpu.h
    52  uint64_t efer;  member
    106  uint64_t efer;  member
/Linux-v5.4/arch/x86/kvm/
emulate.c
    809  u64 efer = 0;  in assign_eip_far() local
    811  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);  in assign_eip_far()
    812  if (efer & EFER_LMA)  in assign_eip_far()
    1568  u64 efer = 0;  in get_descriptor_ptr() local
    1570  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);  in get_descriptor_ptr()
    1571  if (!(efer & EFER_LMA))  in get_descriptor_ptr()
    1719  u64 efer = 0;  in __load_segment_descriptor() local
    1721  ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);  in __load_segment_descriptor()
    1722  if (efer & EFER_LMA)  in __load_segment_descriptor()
    2584  unsigned long cr0, cr4, efer;  in em_rsm() local
    [all …]
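
The emulator never reads the MSR itself; every one of these hits asks the host callbacks for EFER and then branches on EFER_LMA. A condensed sketch of that pattern, with a hypothetical helper name (the real functions inline the check):

    static bool ctxt_in_long_mode(struct x86_emulate_ctxt *ctxt)
    {
            u64 efer = 0;

            /* Ask the emulator's host (KVM) for the guest's EFER. */
            ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
            return efer & EFER_LMA;
    }

assign_eip_far(), get_descriptor_ptr() and __load_segment_descriptor() all use this test to pick the long-mode behaviour.
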
x86.h
    87  return vcpu->arch.efer & EFER_LMA;  in is_long_mode()
    106  return (vcpu->arch.efer & EFER_LMA) &&  in is_la57_mode()
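
These two helpers are small enough to reconstruct; the second line of is_la57_mode() is cut off in the listing, so the CR4.LA57 half below is an assumption from context:

    static inline bool is_long_mode(struct kvm_vcpu *vcpu)
    {
            return vcpu->arch.efer & EFER_LMA;
    }

    static inline bool is_la57_mode(struct kvm_vcpu *vcpu)
    {
            /* 5-level paging: long mode plus CR4.LA57 (assumed). */
            return (vcpu->arch.efer & EFER_LMA) &&
                   kvm_read_cr4_bits(vcpu, X86_CR4_LA57);
    }
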
svm.c
    734  static void svm_set_efer(struct kvm_vcpu *vcpu, u64 efer)  in svm_set_efer() argument
    736  vcpu->arch.efer = efer;  in svm_set_efer()
    740  efer |= EFER_NX;  in svm_set_efer()
    742  if (!(efer & EFER_LMA))  in svm_set_efer()
    743  efer &= ~EFER_LME;  in svm_set_efer()
    746  to_svm(vcpu)->vmcb->save.efer = efer | EFER_SVME;  in svm_set_efer()
    913  uint64_t efer;  in svm_hardware_enable() local
    917  rdmsrl(MSR_EFER, efer);  in svm_hardware_enable()
    918  if (efer & EFER_SVME)  in svm_hardware_enable()
    939  wrmsrl(MSR_EFER, efer | EFER_SVME);  in svm_hardware_enable()
    [all …]
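
Reassembling the svm_set_efer() hits gives roughly the shape below; the !npt_enabled guard around the NX/LME fixups is an assumption (those lines fall in the gaps between hits). The point: record what the guest asked for, but always write SVME into the VMCB copy.

    static void svm_set_efer(struct kvm_vcpu *vcpu, u64 efer)
    {
            vcpu->arch.efer = efer;          /* the guest-visible value */

            if (!npt_enabled) {              /* assumed guard: shadow-paging path */
                    efer |= EFER_NX;         /* shadow paging relies on NX */

                    if (!(efer & EFER_LMA))
                            efer &= ~EFER_LME;
            }

            /* The hardware copy always keeps SVME set while SVM is in use. */
            to_svm(vcpu)->vmcb->save.efer = efer | EFER_SVME;
    }

The svm_hardware_enable() hits are the host-side mirror of cpu_svm_disable() above: read EFER, refuse if SVME is already set, otherwise write it back with SVME ORed in.
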
x86.c
    772  if ((vcpu->arch.efer & EFER_LME)) {  in kvm_set_cr0()
    1368  static bool __kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer)  in __kvm_valid_efer() argument
    1370  if (efer & EFER_FFXSR && !guest_cpuid_has(vcpu, X86_FEATURE_FXSR_OPT))  in __kvm_valid_efer()
    1373  if (efer & EFER_SVME && !guest_cpuid_has(vcpu, X86_FEATURE_SVM))  in __kvm_valid_efer()
    1376  if (efer & (EFER_LME | EFER_LMA) &&  in __kvm_valid_efer()
    1380  if (efer & EFER_NX && !guest_cpuid_has(vcpu, X86_FEATURE_NX))  in __kvm_valid_efer()
    1386  bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer)  in kvm_valid_efer() argument
    1388  if (efer & efer_reserved_bits)  in kvm_valid_efer()
    1391  return __kvm_valid_efer(vcpu, efer);  in kvm_valid_efer()
    1397  u64 old_efer = vcpu->arch.efer;  in set_efer()
    [all …]
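
The validation logic reconstructs almost completely from the hits; only the continuation of the LME/LMA test is assumed (X86_FEATURE_LM). Each EFER feature bit is allowed only if the guest's CPUID advertises the matching feature:

    static bool __kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer)
    {
            if (efer & EFER_FFXSR && !guest_cpuid_has(vcpu, X86_FEATURE_FXSR_OPT))
                    return false;

            if (efer & EFER_SVME && !guest_cpuid_has(vcpu, X86_FEATURE_SVM))
                    return false;

            if (efer & (EFER_LME | EFER_LMA) &&
                !guest_cpuid_has(vcpu, X86_FEATURE_LM))     /* assumed continuation */
                    return false;

            if (efer & EFER_NX && !guest_cpuid_has(vcpu, X86_FEATURE_NX))
                    return false;

            return true;
    }

    bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer)
    {
            if (efer & efer_reserved_bits)      /* bits KVM never lets a guest set */
                    return false;

            return __kvm_valid_efer(vcpu, efer);
    }

The set_efer() hit suggests the old value is kept so KVM can react when bits such as NX flip; the kvm_set_cr0() hit is the LME check that activates long mode when paging is enabled.
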
cpuid.c
    157  unsigned long long efer = 0;  in is_efer_nx() local
    159  rdmsrl_safe(MSR_EFER, &efer);  in is_efer_nx()
    160  return efer & EFER_NX;  in is_efer_nx()
mmu.c
    601  return vcpu->arch.efer & EFER_NX;  in is_nx()
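
Two one-line predicates recovered from the hits: cpuid.c asks the host CPU (rdmsrl_safe() tolerates CPUs without the MSR), while mmu.c only consults the value KVM tracks for the guest.

    /* Host: is EFER.NX set on this physical CPU? */
    static int is_efer_nx(void)
    {
            unsigned long long efer = 0;

            rdmsrl_safe(MSR_EFER, &efer);
            return efer & EFER_NX;
    }

    /* Guest: what KVM has recorded for the vCPU. */
    static int is_nx(struct kvm_vcpu *vcpu)
    {
            return vcpu->arch.efer & EFER_NX;
    }
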
/Linux-v5.4/arch/x86/power/
cpu.c
    116  rdmsrl(MSR_EFER, ctxt->efer);  in __save_processor_state()
    207  wrmsrl(MSR_EFER, ctxt->efer);  in __restore_processor_state()
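
The suspend path pairs the two hits symmetrically, presumably through the saved_context whose efer member appears in suspend_64.h above:

    /* __save_processor_state(): stash EFER before sleeping ... */
    rdmsrl(MSR_EFER, ctxt->efer);

    /* ... __restore_processor_state(): put it back on resume. */
    wrmsrl(MSR_EFER, ctxt->efer);
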
/Linux-v5.4/arch/x86/include/uapi/asm/
kvm.h
    155  __u64 efer;  member
/Linux-v5.4/tools/arch/x86/include/uapi/asm/
kvm.h
    155  __u64 efer;  member
/Linux-v5.4/arch/x86/kvm/vmx/
vmx.c
    969  u64 guest_efer = vmx->vcpu.arch.efer;  in update_transition_efer()
    993  (enable_ept && ((vmx->vcpu.arch.efer ^ host_efer) & EFER_NX))) {  in update_transition_efer()
    1668  if (is_long_mode(&vmx->vcpu) && (vmx->vcpu.arch.efer & EFER_SCE)) {  in setup_msrs()
    2793  void vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer)  in vmx_set_efer() argument
    2801  vcpu->arch.efer = efer;  in vmx_set_efer()
    2802  if (efer & EFER_LMA) {  in vmx_set_efer()
    2804  msr->data = efer;  in vmx_set_efer()
    2808  msr->data = efer & ~EFER_LME;  in vmx_set_efer()
    2829  vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA);  in enter_lmode()
    2835  vmx_set_efer(vcpu, vcpu->arch.efer & ~EFER_LMA);  in exit_lmode()
    [all …]
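
A sketch of vmx_set_efer() filled in from the hits; the MSR-slot lookup and the VM-entry control toggling (lines between the hits) are assumptions from context, not quoted source. On VMX the EFER value that reaches the guest travels through an auto-loaded MSR slot, and the "IA-32e mode guest" entry control has to agree with EFER_LMA:

    void vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer)
    {
            struct vcpu_vmx *vmx = to_vmx(vcpu);
            struct shared_msr_entry *msr = find_msr_entry(vmx, MSR_EFER);  /* assumed lookup */

            if (!msr)
                    return;

            vcpu->arch.efer = efer;
            if (efer & EFER_LMA) {
                    vm_entry_controls_setbit(vmx, VM_ENTRY_IA32E_MODE);    /* assumed */
                    msr->data = efer;
            } else {
                    vm_entry_controls_clearbit(vmx, VM_ENTRY_IA32E_MODE);  /* assumed */
                    msr->data = efer & ~EFER_LME;  /* hide LME while LMA is clear */
            }
            setup_msrs(vmx);
    }

enter_lmode() and exit_lmode() are thin wrappers that OR in or mask out EFER_LMA, as the last two hits show; update_transition_efer() (first two hits) appears to check whether host and guest EFER differ only in bits like NX so the EFER switch on entry/exit can be avoided.
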
vmx.h
    320  void vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer);
nested.c
    1966  return vmx->vcpu.arch.efer | (EFER_LMA | EFER_LME);  in nested_vmx_calc_efer()
    1968  return vmx->vcpu.arch.efer & ~(EFER_LMA | EFER_LME);  in nested_vmx_calc_efer()
    2402  vcpu->arch.efer = nested_vmx_calc_efer(vmx, vmcs12);  in prepare_vmcs02()
    2404  vmx_set_efer(vcpu, vcpu->arch.efer);  in prepare_vmcs02()
    2668  ia32e = !!(vcpu->arch.efer & EFER_LMA);  in nested_vmx_check_host_state()
    3715  vmcs12->guest_ia32_efer = vcpu->arch.efer;  in sync_vmcs02_to_vmcs12()
    3793  vcpu->arch.efer = vmcs12->host_ia32_efer;  in load_vmcs12_host_state()
    3795  vcpu->arch.efer |= (EFER_LMA | EFER_LME);  in load_vmcs12_host_state()
    3797  vcpu->arch.efer &= ~(EFER_LMA | EFER_LME);  in load_vmcs12_host_state()
    3798  vmx_set_efer(vcpu, vcpu->arch.efer);  in load_vmcs12_host_state()
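
A sketch of nested_vmx_calc_efer(), which decides what EFER L2 starts with. Only the two return expressions are in the hits; the surrounding conditions (whether vmcs12 loads EFER explicitly, and the IA-32e entry control) are assumptions from context:

    static u64 nested_vmx_calc_efer(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12)
    {
            /* Assumed condition: vmcs12 explicitly supplies an EFER. */
            if (vmx->nested.nested_run_pending &&
                (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_EFER))
                    return vmcs12->guest_ia32_efer;

            /* Otherwise derive it from L1's EFER and the requested mode. */
            if (vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE)
                    return vmx->vcpu.arch.efer | (EFER_LMA | EFER_LME);
            else
                    return vmx->vcpu.arch.efer & ~(EFER_LMA | EFER_LME);
    }

load_vmcs12_host_state() performs the mirror-image adjustment on VM-exit, as the last four hits show.
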
/Linux-v5.4/drivers/parport/
parport_pc.c
    1110  static void decode_winbond(int efer, int key, int devid, int devrev, int oldid)  in decode_winbond() argument
    1156  efer, key, devid, devrev, oldid, type);  in decode_winbond()
    1159  show_parconfig_winbond(efer, key);  in decode_winbond()
    1162  static void decode_smsc(int efer, int key, int devid, int devrev)  in decode_smsc() argument
    1189  efer, key, devid, devrev, type);  in decode_smsc()
    1192  func(efer, key);  in decode_smsc()
/Linux-v5.4/tools/testing/selftests/kvm/lib/x86_64/
processor.c
    220  sregs->cr8, sregs->efer, sregs->apic_base);  in sregs_dump()
    627  sregs.efer |= (EFER_LME | EFER_LMA | EFER_NX);  in vcpu_setup()
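
The selftest library drives the same field from userspace. Below is a hedged, plain-ioctl rendition of what vcpu_setup() does through its wrappers; make_vcpu_long_mode() and vcpu_fd are hypothetical names, and the EFER_* values are the architectural bit positions rather than a kernel header:

    #include <linux/kvm.h>
    #include <sys/ioctl.h>

    #define EFER_LME  (1ULL << 8)    /* long mode enable */
    #define EFER_LMA  (1ULL << 10)   /* long mode active */
    #define EFER_NX   (1ULL << 11)   /* no-execute enable */

    /* Force the 64-bit mode bits into a vCPU's EFER via KVM_SET_SREGS. */
    static int make_vcpu_long_mode(int vcpu_fd)
    {
            struct kvm_sregs sregs;

            if (ioctl(vcpu_fd, KVM_GET_SREGS, &sregs) < 0)
                    return -1;
            sregs.efer |= (EFER_LME | EFER_LMA | EFER_NX);
            if (ioctl(vcpu_fd, KVM_SET_SREGS, &sregs) < 0)
                    return -1;
            return 0;
    }

struct kvm_sregs and its efer member are the uapi kvm.h hits above, and the api.txt entry below documents the same field.
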
/Linux-v5.4/Documentation/virt/kvm/
mmu.txt
    165  Contains the value of efer.nxe for which the page is valid.
    330  - mov to cr0/cr4/efer
api.txt
    423  __u64 efer;