Searched refs:is_guest_mode (Results 1 – 8 of 8) sorted by relevance
34 bool is_guest_mode; member
93 if (!is_guest_mode(v) && kvm_vcpu_apicv_active(v)) in kvm_cpu_has_injectable_intr()
103 static inline bool is_guest_mode(struct kvm_vcpu *vcpu) in is_guest_mode() function
505 if (!is_guest_mode(&svm->vcpu)) in recalc_intercepts()
520 if (is_guest_mode(&svm->vcpu)) in get_host_vmcb()
1043 msrpm = is_guest_mode(vcpu) ? to_svm(vcpu)->nested.msrpm: in msr_write_intercepted()
1441 if (is_guest_mode(vcpu)) in svm_read_l1_tsc_offset()
1452 if (is_guest_mode(vcpu)) { in svm_write_tsc_offset()
2960 if (!is_guest_mode(&svm->vcpu)) in nested_svm_check_exception()
2992 if (!is_guest_mode(&svm->vcpu)) in nested_svm_intr()
3031 if (!is_guest_mode(&svm->vcpu)) in nested_svm_nmi()
3904 if (!is_guest_mode(&svm->vcpu) || in check_selective_cr0_intercepted()
4266 if (is_guest_mode(vcpu)) in svm_set_msr()
[all …]
2594 if (is_guest_mode(vcpu)) in update_exception_bitmap()
3426 if (is_guest_mode(vcpu) && in vmx_read_l1_tsc_offset()
3438 if (is_guest_mode(vcpu)) { in vmx_write_tsc_offset()
5314 (!is_guest_mode(vcpu) || nested_ept_ad_enabled(vcpu))) in construct_eptp()
5341 is_guest_mode(vcpu)) in vmx_set_cr3()
5373 } else if (!is_guest_mode(vcpu) || in vmx_set_cr4()
6181 if (WARN_ON_ONCE(!is_guest_mode(vcpu)) || in vmx_guest_apic_has_interrupt()
6239 if (is_guest_mode(vcpu) && in vmx_deliver_nested_posted_interrupt()
6349 if (is_guest_mode(&vmx->vcpu)) in set_cr4_guest_host_mask()
7192 if (is_guest_mode(vcpu)) { in handle_set_cr0()
[all …]
492 is_guest_mode(vcpu) && fault->async_page_fault; in kvm_inject_page_fault()
3417 is_guest_mode(vcpu))) in kvm_vcpu_ioctl_x86_set_vcpu_events()
5867 if (!is_guest_mode(vcpu) && kvm_x86_ops->get_cpl(vcpu) == 0) { in handle_emulation_failure()
5889 if (WARN_ON_ONCE(is_guest_mode(vcpu))) in reexecute_instruction()
5980 if (WARN_ON_ONCE(is_guest_mode(vcpu))) in retry_instruction()
6978 if (is_guest_mode(vcpu) && kvm_x86_ops->check_nested_events) { in inject_pending_event()
7028 if (is_guest_mode(vcpu) && kvm_x86_ops->check_nested_events) { in inject_pending_event()
7319 if (is_guest_mode(vcpu)) in vcpu_scan_ioapic()
7713 if (is_guest_mode(vcpu) && kvm_x86_ops->check_nested_events) in kvm_vcpu_running()
9226 return (is_guest_mode(vcpu) && in kvm_guest_apic_has_interrupt()
3960 if (!vcpu->arch.apf.delivery_as_pf_vmexit && is_guest_mode(vcpu)) in kvm_can_do_async_pf()
3975 if (is_guest_mode(vcpu) && !kvm_is_visible_gfn(vcpu->kvm, gfn)) { in try_async_pf()
4731 role.guest_mode = is_guest_mode(vcpu); in kvm_calc_tdp_mmu_root_page_role()
4798 role.guest_mode = is_guest_mode(vcpu); in kvm_calc_shadow_mmu_root_page_role()
5283 if (!mmio_info_in_cache(vcpu, cr2, direct) && !is_guest_mode(vcpu)) in kvm_mmu_page_fault()
4321 pi_data->is_guest_mode = false; in amd_ir_set_vcpu_affinity()
4329 if (pi_data->is_guest_mode) { in amd_ir_set_vcpu_affinity()