Searched refs:EFER_LME (Results 1 – 7 of 7) sorted by relevance
33 #define EFER_LME (1<<_EFER_LME) macro
342 #define EFER_LME (1<<8) /* Long mode enable */ macro
630 sregs.efer |= (EFER_LME | EFER_LMA | EFER_NX); in vcpu_setup()
2802 ignore_bits |= EFER_LMA | EFER_LME; in update_transition_efer()
2818 guest_efer &= ~EFER_LME; in update_transition_efer()
5079 msr->data = efer & ~EFER_LME; in vmx_set_efer()
5281 if (vcpu->arch.efer & EFER_LME) { in vmx_set_cr0()
12341 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in prepare_vmcs02()
12343 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in prepare_vmcs02()
12576 ia32e != !!(vmcs12->guest_ia32_efer & EFER_LME))) in check_vmentry_postreqs()
12591 ia32e != !!(vmcs12->host_ia32_efer & EFER_LME)) in check_vmentry_postreqs()
13160 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in load_vmcs12_host_state()
13162 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in load_vmcs12_host_state()
740 efer &= ~EFER_LME; in svm_set_efer()
2499 if (vcpu->arch.efer & EFER_LME) { in svm_set_cr0()
2502 svm->vmcb->save.efer |= EFER_LMA | EFER_LME; in svm_set_cr0()
2507 svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME); in svm_set_cr0()
90 u64 __read_mostly efer_reserved_bits = ~((u64)(EFER_SCE | EFER_LME | EFER_LMA));
673 if ((vcpu->arch.efer & EFER_LME)) { in kvm_set_cr0()
1188 && (vcpu->arch.efer & EFER_LME) != (efer & EFER_LME)) in set_efer()
8155 if ((sregs->efer & EFER_LME) && (sregs->cr0 & X86_CR0_PG)) { in kvm_valid_sregs()
4171 if ((new_val & X86_CR0_PG) && (efer & EFER_LME) && in check_cr_write()