Searched refs:EFER_LMA (Results 1 – 10 of 10) sorted by relevance
87 return vcpu->arch.efer & EFER_LMA; in is_long_mode()
106 return (vcpu->arch.efer & EFER_LMA) && in is_la57_mode()
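These two KVM helpers reduce to plain bit tests on the cached EFER value. A minimal standalone sketch of the same checks (the vcpu layout is simplified here; EFER_LMA and X86_CR4_LA57 use the architectural bit positions, 10 and 12):

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LMA     (1ULL << 10)  /* Long Mode Active, managed by the CPU */
    #define X86_CR4_LA57 (1ULL << 12)  /* 5-level paging (57-bit linear addresses) */

    struct vcpu { uint64_t efer; uint64_t cr4; };

    /* Long mode is active iff EFER.LMA is set. */
    static bool is_long_mode(const struct vcpu *v)
    {
            return v->efer & EFER_LMA;
    }

    /* CR4.LA57 is only meaningful while long mode is active. */
    static bool is_la57_mode(const struct vcpu *v)
    {
            return is_long_mode(v) && (v->cr4 & X86_CR4_LA57);
    }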
812 if (efer & EFER_LMA) in assign_eip_far()
1571 if (!(efer & EFER_LMA)) in get_descriptor_ptr()
1722 if (efer & EFER_LMA) in __load_segment_descriptor()
2544 ctxt->ops->set_msr(ctxt, MSR_EFER, val & ~EFER_LMA); in rsm_load_state_64()
2783 if (efer & EFER_LMA) { in em_syscall()
2791 if (efer & EFER_LMA) { in em_syscall()
2833 if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA) in em_sysenter()
2850 if (efer & EFER_LMA) { in em_sysenter()
2859 ctxt->_eip = (efer & EFER_LMA) ? msr_data : (u32)msr_data; in em_sysenter()
2862 *reg_write(ctxt, VCPU_REGS_RSP) = (efer & EFER_LMA) ? msr_data : in em_sysenter()
[all …]
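The emulator hits show EFER.LMA steering SYSCALL/SYSENTER emulation between 64-bit and legacy behavior, and rsm_load_state_64() masking LMA out of the value written back to MSR_EFER. A sketch of the em_sysenter() pattern on line 2859, with a hypothetical simplified signature: outside long mode the entry point taken from the MSR is truncated to 32 bits.

    #include <stdint.h>

    #define EFER_LMA (1ULL << 10)

    /* Sketch of the em_sysenter() RIP selection: the MSR-provided entry
     * point is used in full in long mode, truncated to 32 bits otherwise. */
    static uint64_t sysenter_target_rip(uint64_t efer, uint64_t msr_data)
    {
            return (efer & EFER_LMA) ? msr_data : (uint32_t)msr_data;
    }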
742 if (!(efer & EFER_LMA)) in svm_set_efer()
2560 vcpu->arch.efer |= EFER_LMA; in svm_set_cr0()
2561 svm->vmcb->save.efer |= EFER_LMA | EFER_LME; in svm_set_cr0()
2565 vcpu->arch.efer &= ~EFER_LMA; in svm_set_cr0()
2566 svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME); in svm_set_cr0()
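On SVM, long mode becomes active when the guest enables paging with EFER.LME already set, and KVM mirrors that into both its cached EFER and the VMCB copy. A standalone sketch of the svm_set_cr0() transition above, with the field names simplified:

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LME (1ULL << 8)
    #define EFER_LMA (1ULL << 10)

    struct vcpu_state { uint64_t arch_efer; uint64_t vmcb_efer; };

    /* Activating paging with LME set turns long mode on; disabling
     * paging turns it off in both copies of EFER. */
    static void update_long_mode(struct vcpu_state *s, bool paging_enabled)
    {
            if (paging_enabled && (s->arch_efer & EFER_LME)) {
                    s->arch_efer |= EFER_LMA;
                    s->vmcb_efer |= EFER_LMA | EFER_LME;
            } else if (!paging_enabled) {
                    s->arch_efer &= ~EFER_LMA;
                    s->vmcb_efer &= ~(EFER_LMA | EFER_LME);
            }
    }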
90 u64 __read_mostly efer_reserved_bits = ~((u64)(EFER_SCE | EFER_LME | EFER_LMA));
1376 if (efer & (EFER_LME | EFER_LMA) && in __kvm_valid_efer()
1412 efer &= ~EFER_LMA; in set_efer()
1413 efer |= vcpu->arch.efer & EFER_LMA; in set_efer()
8757 || !(sregs->efer & EFER_LMA)) in kvm_valid_sregs()
8764 if (sregs->efer & EFER_LMA || sregs->cs.l) in kvm_valid_sregs()
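EFER.LMA is read-only from the guest's perspective, so set_efer() strips it from the incoming write and re-inserts the current value, while __kvm_valid_efer() rejects LME/LMA on hardware without long mode. A sketch of the masking on lines 1412-1413:

    #include <stdint.h>

    #define EFER_LMA (1ULL << 10)

    /* Whatever the guest writes, the LMA bit is replaced with its
     * current, hardware-managed value. */
    static uint64_t sanitize_efer_write(uint64_t current_efer, uint64_t new_efer)
    {
            new_efer &= ~EFER_LMA;
            new_efer |= current_efer & EFER_LMA;
            return new_efer;
    }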
98 trampoline_header->efer = efer & ~EFER_LMA; in setup_real_mode()
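The real-mode trampoline is handed an EFER image with LMA cleared, since a CPU starting in the trampoline is not yet in long mode; the CPU sets the bit itself once paging is re-enabled. A one-function sketch of that masking, under a hypothetical helper name:

    #include <stdint.h>

    #define EFER_LMA (1ULL << 10)

    /* The EFER image given to the trampoline must not claim long mode
     * is already active. */
    static uint64_t trampoline_efer(uint64_t kernel_efer)
    {
            return kernel_efer & ~EFER_LMA;
    }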
36 #define EFER_LMA (1<<_EFER_LMA) macro
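This kernel header builds EFER_LMA from its bit position, _EFER_LMA, which is 10; the tools/selftests copy further down in the results spells the same value with a literal shift. Shown side by side (the second name is altered here only to avoid a redefinition):

    #define _EFER_LMA    10                 /* bit position */
    #define EFER_LMA     (1 << _EFER_LMA)   /* kernel header style */
    #define EFER_LMA_ALT (1 << 10)          /* tools/selftests style, same value */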
1966 return vmx->vcpu.arch.efer | (EFER_LMA | EFER_LME); in nested_vmx_calc_efer()
1968 return vmx->vcpu.arch.efer & ~(EFER_LMA | EFER_LME); in nested_vmx_calc_efer()
2153 if (guest_efer & EFER_LMA) in prepare_vmcs02_early()
2668 ia32e = !!(vcpu->arch.efer & EFER_LMA); in nested_vmx_check_host_state()
2715 CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LMA)) || in nested_vmx_check_host_state()
2795 CC(ia32e != !!(vmcs12->guest_ia32_efer & EFER_LMA)) || in nested_vmx_check_guest_state()
3795 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in load_vmcs12_host_state()
3797 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in load_vmcs12_host_state()
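Nested VMX derives the post-transition EFER from the IA-32e mode VM-entry/exit controls and cross-checks those controls against the LMA bit in the vmcs12 EFER fields (the CC() consistency checks above). A sketch of the nested_vmx_calc_efer() pattern on lines 1966-1968, with the control bit reduced to a boolean:

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LME (1ULL << 8)
    #define EFER_LMA (1ULL << 10)

    /* If the transition requests IA-32e mode, both LME and LMA must be
     * set in the resulting EFER; otherwise both must be clear. */
    static uint64_t calc_nested_efer(uint64_t vcpu_efer, bool ia32e)
    {
            if (ia32e)
                    return vcpu_efer | (EFER_LMA | EFER_LME);
            else
                    return vcpu_efer & ~(EFER_LMA | EFER_LME);
    }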
981 ignore_bits |= EFER_LMA | EFER_LME; in update_transition_efer()
983 if (guest_efer & EFER_LMA) in update_transition_efer()
994 if (!(guest_efer & EFER_LMA)) in update_transition_efer()
2802 if (efer & EFER_LMA) { in vmx_set_efer()
2829 vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA); in enter_lmode()
2835 vmx_set_efer(vcpu, vcpu->arch.efer & ~EFER_LMA); in exit_lmode()
5723 (cr4 & X86_CR4_PAE) && !(efer & EFER_LMA)) in dump_vmcs()
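On VMX, guest and host EFER legitimately differ in LMA/LME across a VM transition, so update_transition_efer() treats them as ignore bits when deciding whether EFER still needs an atomic MSR switch, and enter_lmode()/exit_lmode() toggle LMA through vmx_set_efer(). A sketch of that comparison, assuming a hypothetical efer_switch_needed() helper:

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LME (1ULL << 8)
    #define EFER_LMA (1ULL << 10)

    /* Guest and host EFER count as equal if they only differ in bits
     * the VM entry/exit transition handles anyway (here LMA/LME). */
    static bool efer_switch_needed(uint64_t guest_efer, uint64_t host_efer)
    {
            uint64_t ignore_bits = EFER_LMA | EFER_LME;

            return (guest_efer & ~ignore_bits) != (host_efer & ~ignore_bits);
    }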
364 #define EFER_LMA (1<<10) /* Long mode active (read-only) */ macro
627 sregs.efer |= (EFER_LME | EFER_LMA | EFER_NX); in vcpu_setup()
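The KVM selftests create guests that start directly in 64-bit mode, so vcpu_setup() pre-sets LME, LMA and NX in the sregs passed to the kernel. A sketch of that EFER image (bit values are the architectural positions 8, 10 and 11):

    #include <stdint.h>

    #define EFER_LME (1ULL << 8)
    #define EFER_LMA (1ULL << 10)
    #define EFER_NX  (1ULL << 11)

    /* An EFER image for a guest created already running in 64-bit
     * long mode, with no-execute support enabled. */
    static uint64_t selftest_guest_efer(uint64_t efer)
    {
            return efer | (EFER_LME | EFER_LMA | EFER_NX);
    }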