Lines matching refs:efer in arch/x86/kvm/x86.c
829 if ((vcpu->arch.efer & EFER_LME) && !is_paging(vcpu) && in kvm_set_cr0()
840 if (!(vcpu->arch.efer & EFER_LME) && (cr0 & X86_CR0_PG) && in kvm_set_cr0()
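The two hits in kvm_set_cr0() gate CR0.PG transitions against the current EFER. A hedged sketch of that passage (x86.c circa v5.9; simplified, and the PDPTE-reload condition is abbreviated relative to the real code, which also checks which CR0 bits actually changed):

#ifdef CONFIG_X86_64
	if ((vcpu->arch.efer & EFER_LME) && !is_paging(vcpu) &&
	    (cr0 & X86_CR0_PG)) {
		int cs_db, cs_l;

		/* Activating long mode: PAE must already be enabled ... */
		if (!is_pae(vcpu))
			return 1;
		/* ... and the current code segment must not be 64-bit. */
		kvm_x86_ops.get_cs_db_l_bits(vcpu, &cs_db, &cs_l);
		if (cs_l)
			return 1;
	}
#endif
	/*
	 * Legacy paging being enabled without LME: if PAE is on, the
	 * PDPTEs have to be reloadable from the current CR3.
	 */
	if (!(vcpu->arch.efer & EFER_LME) && (cr0 & X86_CR0_PG) &&
	    is_pae(vcpu) &&
	    !load_pdptrs(vcpu, vcpu->arch.walk_mmu, kvm_read_cr3(vcpu)))
		return 1;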
1430 static bool __kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer) in __kvm_valid_efer() argument
1432 if (efer & EFER_FFXSR && !guest_cpuid_has(vcpu, X86_FEATURE_FXSR_OPT)) in __kvm_valid_efer()
1435 if (efer & EFER_SVME && !guest_cpuid_has(vcpu, X86_FEATURE_SVM)) in __kvm_valid_efer()
1438 if (efer & (EFER_LME | EFER_LMA) && in __kvm_valid_efer()
1442 if (efer & EFER_NX && !guest_cpuid_has(vcpu, X86_FEATURE_NX)) in __kvm_valid_efer()
1448 bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer) in kvm_valid_efer() argument
1450 if (efer & efer_reserved_bits) in kvm_valid_efer()
1453 return __kvm_valid_efer(vcpu, efer); in kvm_valid_efer()
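Lines 1430-1453 split EFER validation in two: __kvm_valid_efer() checks that every feature bit the guest wants to set is actually advertised in its CPUID, while kvm_valid_efer() rejects reserved bits first. A hedged reconstruction, with the return statements and the X86_FEATURE_LM check (which the identifier search did not match) filled in from the visible pattern:

static bool __kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer)
{
	/* Each feature bit may be set only if guest CPUID exposes it. */
	if (efer & EFER_FFXSR && !guest_cpuid_has(vcpu, X86_FEATURE_FXSR_OPT))
		return false;

	if (efer & EFER_SVME && !guest_cpuid_has(vcpu, X86_FEATURE_SVM))
		return false;

	if (efer & (EFER_LME | EFER_LMA) &&
	    !guest_cpuid_has(vcpu, X86_FEATURE_LM))
		return false;

	if (efer & EFER_NX && !guest_cpuid_has(vcpu, X86_FEATURE_NX))
		return false;

	return true;
}

bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer)
{
	if (efer & efer_reserved_bits)
		return false;

	return __kvm_valid_efer(vcpu, efer);
}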
1459 u64 old_efer = vcpu->arch.efer; in set_efer()
1460 u64 efer = msr_info->data; in set_efer() local
1463 if (efer & efer_reserved_bits) in set_efer()
1467 if (!__kvm_valid_efer(vcpu, efer)) in set_efer()
1471 (vcpu->arch.efer & EFER_LME) != (efer & EFER_LME)) in set_efer()
1475 efer &= ~EFER_LMA; in set_efer()
1476 efer |= vcpu->arch.efer & EFER_LMA; in set_efer()
1478 r = kvm_x86_ops.set_efer(vcpu, efer); in set_efer()
1485 if ((efer ^ old_efer) & EFER_NX) in set_efer()
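set_efer() is the MSR-write path (guest WRMSR to MSR_EFER, or a host-initiated KVM_SET_MSRS). The visible lines imply the following flow; the host_initiated guard and the error handling are assumptions based on the surrounding code of that era:

static int set_efer(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
{
	u64 old_efer = vcpu->arch.efer;
	u64 efer = msr_info->data;
	int r;

	if (efer & efer_reserved_bits)
		return 1;

	if (!msr_info->host_initiated) {
		if (!__kvm_valid_efer(vcpu, efer))
			return 1;

		/* LME must not be toggled while paging is enabled. */
		if (is_paging(vcpu) &&
		    (vcpu->arch.efer & EFER_LME) != (efer & EFER_LME))
			return 1;
	}

	/* LMA is managed by KVM, not the guest: keep the current value. */
	efer &= ~EFER_LMA;
	efer |= vcpu->arch.efer & EFER_LMA;

	r = kvm_x86_ops.set_efer(vcpu, efer);
	if (r)
		return r;

	/* Toggling NX changes what the MMU considers a valid PTE. */
	if ((efer ^ old_efer) & EFER_NX)
		kvm_mmu_reset_context(vcpu);

	return 0;
}

Note how LMA is stripped before the vendor call: the guest cannot write it directly, and KVM derives it when paging is enabled with EFER.LME set.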
3465 msr_info->data = vcpu->arch.efer; in kvm_get_msr_common()
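The read side is trivial by comparison: kvm_get_msr_common() just returns the cached value. A minimal sketch of that switch case (the surrounding framing is assumed):

	case MSR_EFER:
		msr_info->data = vcpu->arch.efer;
		break;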
8481 put_smstate(u64, buf, 0x7ed0, vcpu->arch.efer); in enter_smm_save_state_64()
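On SMM entry with a 64-bit state-save area, EFER is written at SMRAM offset 0x7ed0. put_smstate() is a local helper macro in x86.c; as best I recall it is defined along these lines (offsets are taken relative to the 0x7e00 base of the save buffer, so treat this as an assumption to verify against the tree):

#define put_smstate(type, buf, offset, val) \
	*(type *)((buf) + (offset) - 0x7e00) = val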
9442 sregs->efer = vcpu->arch.efer; in __get_sregs()
9540 if ((sregs->efer & EFER_LME) && (sregs->cr0 & X86_CR0_PG)) { in kvm_valid_sregs()
9547 || !(sregs->efer & EFER_LMA)) in kvm_valid_sregs()
9554 if (sregs->efer & EFER_LMA || sregs->cs.l) in kvm_valid_sregs()
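__get_sregs() (line 9442) simply copies the cached EFER out to userspace; the interesting half is validation when userspace sets sregs. kvm_valid_sregs() cross-checks EFER against CR0, CR4 and CS before anything is committed. A hedged reconstruction (the comments and the tail are filled in from context):

static int kvm_valid_sregs(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs)
{
	if ((sregs->efer & EFER_LME) && (sregs->cr0 & X86_CR0_PG)) {
		/*
		 * EFER.LME and CR0.PG together mean long mode is active
		 * (possibly in a compatibility segment), so CR4.PAE and
		 * EFER.LMA must both be set.
		 */
		if (!(sregs->cr4 & X86_CR4_PAE)
		    || !(sregs->efer & EFER_LMA))
			return -EINVAL;
	} else {
		/*
		 * Not in 64-bit mode: EFER.LMA must be clear and the
		 * code segment cannot be a long-mode (L=1) segment.
		 */
		if (sregs->efer & EFER_LMA || sregs->cs.l)
			return -EINVAL;
	}

	return 0;	/* the real function may also validate CR4 here */
}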
9592 mmu_reset_needed |= vcpu->arch.efer != sregs->efer; in __set_sregs()
9593 kvm_x86_ops.set_efer(vcpu, sregs->efer); in __set_sregs()
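Finally, __set_sregs() commits the new value. The EFER comparison feeds mmu_reset_needed, since changing, e.g., NX or long mode invalidates the cached MMU role. A trimmed sketch of that passage (the control-register handling and the final reset call follow the same pattern and are assumed here):

	mmu_reset_needed |= vcpu->arch.efer != sregs->efer;
	kvm_x86_ops.set_efer(vcpu, sregs->efer);

	/* ... CR0, CR4 and CR3 are compared and installed the same way ... */

	if (mmu_reset_needed)
		kvm_mmu_reset_context(vcpu);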