Searched refs:is_guest_mode (Results 1 – 10 of 10) sorted by relevance
22 bool is_guest_mode; member
81 if (!is_guest_mode(v) && kvm_vcpu_apicv_active(v)) in kvm_cpu_has_injectable_intr()
143 static inline bool is_guest_mode(struct kvm_vcpu *vcpu) in is_guest_mode() function
508 if (!is_guest_mode(&svm->vcpu)) in recalc_intercepts()
523 if (is_guest_mode(&svm->vcpu)) in get_host_vmcb()
1049 msrpm = is_guest_mode(vcpu) ? to_svm(vcpu)->nested.msrpm: in msr_write_intercepted()
1452 if (is_guest_mode(vcpu)) in svm_read_l1_tsc_offset()
1463 if (is_guest_mode(vcpu)) { in svm_write_l1_tsc_offset()
3024 if (!is_guest_mode(&svm->vcpu)) in nested_svm_check_exception()
3053 if (!is_guest_mode(&svm->vcpu)) in nested_svm_intr()
3092 if (!is_guest_mode(&svm->vcpu)) in nested_svm_nmi()
3966 if (!is_guest_mode(&svm->vcpu) || in check_selective_cr0_intercepted()
4310 if (is_guest_mode(vcpu)) in svm_set_msr()
[all …]
509 !is_guest_mode(vcpu)) in kvm_multiple_exception()
584 is_guest_mode(vcpu) && fault->async_page_fault; in kvm_inject_page_fault()
6349 if (!is_guest_mode(vcpu) && kvm_x86_ops->get_cpl(vcpu) == 0) { in handle_emulation_failure()
6369 if (WARN_ON_ONCE(is_guest_mode(vcpu))) in reexecute_instruction()
6460 if (WARN_ON_ONCE(is_guest_mode(vcpu))) in retry_instruction()
7534 if (is_guest_mode(vcpu) && kvm_x86_ops->check_nested_events) { in inject_pending_event()
7596 if (is_guest_mode(vcpu) && kvm_x86_ops->check_nested_events) { in inject_pending_event()
7890 if (is_guest_mode(vcpu)) in vcpu_scan_ioapic()
8302 if (is_guest_mode(vcpu) && kvm_x86_ops->check_nested_events) in kvm_vcpu_running()
9837 return (is_guest_mode(vcpu) && in kvm_guest_apic_has_interrupt()
[all …]
4195 if (is_guest_mode(vcpu) && !kvm_is_visible_gfn(vcpu->kvm, gfn)) { in try_async_pf()
4977 role.base.guest_mode = is_guest_mode(vcpu); in kvm_calc_mmu_role_common()
5574 if (!mmio_info_in_cache(vcpu, cr2, direct) && !is_guest_mode(vcpu)) in kvm_mmu_page_fault()
802 if (is_guest_mode(vcpu)) in update_exception_bitmap()
1698 if (is_guest_mode(vcpu) && in vmx_read_l1_tsc_offset()
1716 if (is_guest_mode(vcpu) && in vmx_write_l1_tsc_offset()
1932 if (is_guest_mode(vcpu)) in vmx_set_msr()
1937 if (is_guest_mode(vcpu)) in vmx_set_msr()
1942 if (is_guest_mode(vcpu)) in vmx_set_msr()
1947 if (is_guest_mode(vcpu) && get_vmcs12(vcpu)->vm_exit_controls & in vmx_set_msr()
2035 if (is_guest_mode(vcpu) && in vmx_set_msr()
2988 (!is_guest_mode(vcpu) || nested_ept_ad_enabled(vcpu))) in construct_eptp()
3015 is_guest_mode(vcpu)) in vmx_set_cr3()
[all …]
57 return is_guest_mode(vcpu) || vmx->nested.current_vmptr != -1ull || in vmx_has_valid_vmcs12()
426 WARN_ON(!is_guest_mode(vcpu)); in vmx_inject_page_fault_nested()
4616 if (!is_guest_mode(vcpu)) in handle_vmread()
4636 if (!is_guest_mode(vcpu) && is_vmcs12_ext_field(field)) in handle_vmread()
4738 if (!is_guest_mode(vcpu)) { in handle_vmwrite()
4781 if (!is_guest_mode(vcpu) && !is_shadow_field_rw(field)) { in handle_vmwrite()
5097 if (!is_guest_mode(vcpu)) { in handle_vmfunc()
5533 if (is_guest_mode(vcpu) && in vmx_get_nested_state()
5545 if (is_guest_mode(vcpu)) { in vmx_get_nested_state()
5569 if (is_guest_mode(vcpu)) { in vmx_get_nested_state()
5605 if (is_guest_mode(vcpu)) { in vmx_leave_nested()
4472 pi_data->is_guest_mode = false; in amd_ir_set_vcpu_affinity()
4480 if (pi_data->is_guest_mode) { in amd_ir_set_vcpu_affinity()