Searched refs:EFER_LME (Results 1 – 8 of 8) sorted by relevance
35 #define EFER_LME (1<<_EFER_LME) macro
363 #define EFER_LME (1<<8) /* Long mode enable */ macro
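
The two definition hits agree: _EFER_LME is 8, so both macros name bit 8 of the IA32_EFER MSR (0xC0000080). A minimal standalone sketch of the relevant EFER bit positions, following the msr-index.h naming (illustrative user-space C, not kernel code):

    #include <stdint.h>
    #include <stdio.h>

    /* IA32_EFER (MSR 0xC0000080) bit positions, per the Intel/AMD manuals. */
    #define _EFER_SCE  0   /* SYSCALL/SYSRET enable */
    #define _EFER_LME  8   /* Long Mode Enable */
    #define _EFER_LMA 10   /* Long Mode Active (read-only status bit) */
    #define _EFER_NX  11   /* No-Execute enable */

    #define EFER_SCE (1ULL << _EFER_SCE)
    #define EFER_LME (1ULL << _EFER_LME)
    #define EFER_LMA (1ULL << _EFER_LMA)
    #define EFER_NX  (1ULL << _EFER_NX)

    int main(void)
    {
        uint64_t efer = EFER_LME | EFER_NX;

        printf("EFER_LME = %#llx\n", (unsigned long long)EFER_LME); /* 0x100 */
        printf("long mode enabled: %d\n", !!(efer & EFER_LME));
        return 0;
    }
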
1966 return vmx->vcpu.arch.efer | (EFER_LMA | EFER_LME); in nested_vmx_calc_efer()
1968 return vmx->vcpu.arch.efer & ~(EFER_LMA | EFER_LME); in nested_vmx_calc_efer()
2716 CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LME))) in nested_vmx_check_host_state()
2797 ia32e != !!(vmcs12->guest_ia32_efer & EFER_LME)))) in nested_vmx_check_guest_state()
3795 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in load_vmcs12_host_state()
3797 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in load_vmcs12_host_state()
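
These hits are KVM's nested-VMX code (in mainline, nested_vmx_* and load_vmcs12_host_state() live in arch/x86/kvm/vmx/nested.c). The visible pattern: LMA and LME travel as a pair, forced on together when a VM entry or exit selects IA-32e mode and cleared together otherwise. A simplified sketch of that decision (struct vcpu and the ia32e flag are stand-ins for the real KVM types and the VM_ENTRY_IA32E_MODE control check):

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LME (1ULL << 8)
    #define EFER_LMA (1ULL << 10)

    struct vcpu { uint64_t efer; }; /* stand-in for the real kvm_vcpu */

    /* Mirrors the shape of the nested_vmx_calc_efer() hits: in IA-32e
     * mode LMA and LME are set together; otherwise both are cleared. */
    static uint64_t calc_efer(const struct vcpu *vcpu, bool ia32e)
    {
        if (ia32e)
            return vcpu->efer | (EFER_LMA | EFER_LME);
        return vcpu->efer & ~(EFER_LMA | EFER_LME);
    }
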
981 ignore_bits |= EFER_LMA | EFER_LME; in update_transition_efer()
995 guest_efer &= ~EFER_LME; in update_transition_efer()
2808 msr->data = efer & ~EFER_LME; in vmx_set_efer()
2955 if (vcpu->arch.efer & EFER_LME) { in vmx_set_cr0()
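
Hits 981/995 (update_transition_efer()) treat LMA/LME as bits the hardware switches on VM entry/exit, so they are ignored when comparing host and guest EFER; hits 2808/2955 show vmx_set_efer() dropping LME from the value it keeps when long mode is not active. A hedged sketch of that second pattern (the real function also toggles the VM_ENTRY_IA32E_MODE control, omitted here):

    #include <stdint.h>

    #define EFER_LME (1ULL << 8)
    #define EFER_LMA (1ULL << 10)

    /* Sketch of the vmx_set_efer() pattern from the hits: if EFER.LMA is
     * clear the guest is not actually in long mode, so LME is not kept in
     * the EFER value maintained for the guest. */
    static uint64_t efer_for_guest(uint64_t efer)
    {
        if (efer & EFER_LMA)
            return efer;             /* long mode active: keep as-is */
        return efer & ~EFER_LME;     /* not active: drop LME */
    }
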
627 sregs.efer |= (EFER_LME | EFER_LMA | EFER_NX); in vcpu_setup()
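
Hit 627 is from the KVM selftest harness: to start a test vCPU directly in 64-bit mode, it sets LME, LMA and NX in the sregs pushed to the kernel. A minimal sketch of the equivalent raw ioctl sequence (error handling trimmed; vcpu_fd is assumed to be an open KVM vCPU descriptor, and a real setup must also program CR0/CR4, page tables and 64-bit segments):

    #include <linux/kvm.h>
    #include <sys/ioctl.h>

    #define EFER_LME (1ULL << 8)
    #define EFER_LMA (1ULL << 10)
    #define EFER_NX  (1ULL << 11)

    /* Read-modify-write the special registers so the vCPU is treated
     * as already running in 64-bit long mode. */
    static int enable_long_mode(int vcpu_fd)
    {
        struct kvm_sregs sregs;

        if (ioctl(vcpu_fd, KVM_GET_SREGS, &sregs) < 0)
            return -1;
        sregs.efer |= EFER_LME | EFER_LMA | EFER_NX;
        if (ioctl(vcpu_fd, KVM_SET_SREGS, &sregs) < 0)
            return -1;
        return 0;
    }
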
90 u64 __read_mostly efer_reserved_bits = ~((u64)(EFER_SCE | EFER_LME | EFER_LMA));
772 if ((vcpu->arch.efer & EFER_LME)) { in kvm_set_cr0()
1376 if (efer & (EFER_LME | EFER_LMA) && in __kvm_valid_efer()
1408 (vcpu->arch.efer & EFER_LME) != (efer & EFER_LME)) in set_efer()
8750 if ((sregs->efer & EFER_LME) && (sregs->cr0 & X86_CR0_PG)) { in kvm_valid_sregs()
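
These x86.c hits are the generic sanity layer: efer_reserved_bits masks everything except SCE/LME/LMA, __kvm_valid_efer() rejects LME/LMA on CPUs without long mode, set_efer() refuses to flip LME while paging is enabled, and kvm_valid_sregs() enforces the long-mode paging rule. A sketch of that last consistency check (the architectural rule: paging with LME set requires PAE):

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LME    (1ULL << 8)
    #define X86_CR0_PG  (1ULL << 31)
    #define X86_CR4_PAE (1ULL << 5)

    /* Long mode (EFER.LME) plus paging (CR0.PG) is only a valid state
     * if PAE paging (CR4.PAE) is enabled. */
    static bool sregs_consistent(uint64_t efer, uint64_t cr0, uint64_t cr4)
    {
        if ((efer & EFER_LME) && (cr0 & X86_CR0_PG) && !(cr4 & X86_CR4_PAE))
            return false;
        return true;
    }
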
743 efer &= ~EFER_LME; in svm_set_efer()
2558 if (vcpu->arch.efer & EFER_LME) { in svm_set_cr0()
2561 svm->vmcb->save.efer |= EFER_LMA | EFER_LME; in svm_set_cr0()
2566 svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME); in svm_set_cr0()
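
The SVM hits implement the same rule for AMD: svm_set_cr0() makes the VMCB's saved EFER.LMA track "LME set and paging on", activating long mode when the guest enables CR0.PG and deactivating it when paging is turned off. A sketch of that transition (struct vmcb_save is a stand-in for the real VMCB save area):

    #include <stdint.h>

    #define EFER_LME   (1ULL << 8)
    #define EFER_LMA   (1ULL << 10)
    #define X86_CR0_PG (1ULL << 31)

    struct vmcb_save { uint64_t efer; }; /* stand-in for the real VMCB */

    /* LMA tracks "LME set and paging enabled": turning CR0.PG on while
     * LME is set activates long mode; turning it off deactivates it. */
    static void sync_long_mode(struct vmcb_save *save, uint64_t efer,
                               uint64_t old_cr0, uint64_t new_cr0)
    {
        if (!(efer & EFER_LME))
            return;
        if (!(old_cr0 & X86_CR0_PG) && (new_cr0 & X86_CR0_PG))
            save->efer |= EFER_LMA | EFER_LME;
        else if ((old_cr0 & X86_CR0_PG) && !(new_cr0 & X86_CR0_PG))
            save->efer &= ~(EFER_LMA | EFER_LME);
    }
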
4223 if ((new_val & X86_CR0_PG) && (efer & EFER_LME) && in check_cr_write()
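
Hit 4223 is the instruction emulator enforcing the rule at MOV-to-CR0 time: enabling CR0.PG while EFER.LME is set must find CR4.PAE already set, otherwise the write raises #GP(0). A sketch of that predicate:

    #include <stdbool.h>
    #include <stdint.h>

    #define EFER_LME    (1ULL << 8)
    #define X86_CR0_PG  (1ULL << 31)
    #define X86_CR4_PAE (1ULL << 5)

    /* True when a MOV to CR0 must fault: paging being enabled with
     * EFER.LME set but CR4.PAE still clear. */
    static bool cr0_write_faults(uint64_t new_cr0, uint64_t efer, uint64_t cr4)
    {
        return (new_cr0 & X86_CR0_PG) && (efer & EFER_LME) &&
               !(cr4 & X86_CR4_PAE);
    }
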