Lines matching refs:nested (references to the vcpu_vmx "nested" state in arch/x86/kvm/vmx/nested.c)

180 if (to_vmx(vcpu)->nested.hv_evmcs_vmptr != EVMPTR_INVALID) in nested_vmx_failValid()
181 to_vmx(vcpu)->nested.need_vmcs12_to_shadow_sync = true; in nested_vmx_failValid()
194 if (vmx->nested.current_vmptr == -1ull && in nested_vmx_fail()
195 !evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in nested_vmx_fail()
222 vmx->nested.need_vmcs12_to_shadow_sync = false; in vmx_disable_shadow_vmcs()
229 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) { in nested_release_evmcs()
230 kvm_vcpu_unmap(vcpu, &vmx->nested.hv_evmcs_map, true); in nested_release_evmcs()
231 vmx->nested.hv_evmcs = NULL; in nested_release_evmcs()
234 vmx->nested.hv_evmcs_vmptr = EVMPTR_INVALID; in nested_release_evmcs()
286 if (!vmx->nested.vmxon && !vmx->nested.smm.vmxon) in free_nested()
291 vmx->nested.vmxon = false; in free_nested()
292 vmx->nested.smm.vmxon = false; in free_nested()
293 free_vpid(vmx->nested.vpid02); in free_nested()
294 vmx->nested.posted_intr_nv = -1; in free_nested()
295 vmx->nested.current_vmptr = -1ull; in free_nested()
302 kfree(vmx->nested.cached_vmcs12); in free_nested()
303 vmx->nested.cached_vmcs12 = NULL; in free_nested()
304 kfree(vmx->nested.cached_shadow_vmcs12); in free_nested()
305 vmx->nested.cached_shadow_vmcs12 = NULL; in free_nested()
307 if (vmx->nested.apic_access_page) { in free_nested()
308 kvm_release_page_clean(vmx->nested.apic_access_page); in free_nested()
309 vmx->nested.apic_access_page = NULL; in free_nested()
311 kvm_vcpu_unmap(vcpu, &vmx->nested.virtual_apic_map, true); in free_nested()
312 kvm_vcpu_unmap(vcpu, &vmx->nested.pi_desc_map, true); in free_nested()
313 vmx->nested.pi_desc = NULL; in free_nested()
319 free_loaded_vmcs(&vmx->nested.vmcs02); in free_nested()
366 if (vmx->nested.pml_full) { in nested_ept_inject_page_fault()
368 vmx->nested.pml_full = false; in nested_ept_inject_page_fault()
394 to_vmx(vcpu)->nested.msrs.ept_caps & in nested_ept_new_eptp()
478 !to_vmx(vcpu)->nested.nested_run_pending) { in vmx_inject_page_fault_nested()
611 unsigned long *msr_bitmap_l0 = to_vmx(vcpu)->nested.vmcs02.msr_bitmap; in nested_vmx_prepare_msr_bitmap()
612 struct kvm_host_map *map = &to_vmx(vcpu)->nested.msr_bitmap_map; in nested_vmx_prepare_msr_bitmap()
700 kvm_vcpu_unmap(vcpu, &to_vmx(vcpu)->nested.msr_bitmap_map, false); in nested_vmx_prepare_msr_bitmap()
921 u64 vmx_misc = vmx_control_msr(vmx->nested.msrs.misc_low, in nested_vmx_max_atomic_switch_msrs()
922 vmx->nested.msrs.misc_high); in nested_vmx_max_atomic_switch_msrs()
1162 (nested_cpu_has_vpid(vmcs12) && to_vmx(vcpu)->nested.vpid02); in nested_has_guest_tlb_tag()
1213 vmcs12->virtual_processor_id != vmx->nested.last_vpid) { in nested_vmx_transition_tlb_flush()
1214 vmx->nested.last_vpid = vmcs12->virtual_processor_id; in nested_vmx_transition_tlb_flush()
1234 u64 vmx_basic = vmx->nested.msrs.basic; in vmx_restore_vmx_basic()
1253 vmx->nested.msrs.basic = data; in vmx_restore_vmx_basic()
1265 lowp = &vmx->nested.msrs.pinbased_ctls_low; in vmx_restore_control_msr()
1266 highp = &vmx->nested.msrs.pinbased_ctls_high; in vmx_restore_control_msr()
1269 lowp = &vmx->nested.msrs.procbased_ctls_low; in vmx_restore_control_msr()
1270 highp = &vmx->nested.msrs.procbased_ctls_high; in vmx_restore_control_msr()
1273 lowp = &vmx->nested.msrs.exit_ctls_low; in vmx_restore_control_msr()
1274 highp = &vmx->nested.msrs.exit_ctls_high; in vmx_restore_control_msr()
1277 lowp = &vmx->nested.msrs.entry_ctls_low; in vmx_restore_control_msr()
1278 highp = &vmx->nested.msrs.entry_ctls_high; in vmx_restore_control_msr()
1281 lowp = &vmx->nested.msrs.secondary_ctls_low; in vmx_restore_control_msr()
1282 highp = &vmx->nested.msrs.secondary_ctls_high; in vmx_restore_control_msr()
1313 vmx_misc = vmx_control_msr(vmx->nested.msrs.misc_low, in vmx_restore_vmx_misc()
1314 vmx->nested.msrs.misc_high); in vmx_restore_vmx_misc()
1319 if ((vmx->nested.msrs.pinbased_ctls_high & in vmx_restore_vmx_misc()
1334 vmx->nested.msrs.misc_low = data; in vmx_restore_vmx_misc()
1335 vmx->nested.msrs.misc_high = data >> 32; in vmx_restore_vmx_misc()
1344 vmx_ept_vpid_cap = vmx_control_msr(vmx->nested.msrs.ept_caps, in vmx_restore_vmx_ept_vpid_cap()
1345 vmx->nested.msrs.vpid_caps); in vmx_restore_vmx_ept_vpid_cap()
1351 vmx->nested.msrs.ept_caps = data; in vmx_restore_vmx_ept_vpid_cap()
1352 vmx->nested.msrs.vpid_caps = data >> 32; in vmx_restore_vmx_ept_vpid_cap()
1362 msr = &vmx->nested.msrs.cr0_fixed0; in vmx_restore_fixed0_msr()
1365 msr = &vmx->nested.msrs.cr4_fixed0; in vmx_restore_fixed0_msr()
1395 if (vmx->nested.vmxon) in vmx_set_vmx_msr()
1436 vmx->nested.msrs.vmcs_enum = data; in vmx_set_vmx_msr()
1439 if (data & ~vmx->nested.msrs.vmfunc_controls) in vmx_set_vmx_msr()
1441 vmx->nested.msrs.vmfunc_controls = data; in vmx_set_vmx_msr()
1600 struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; in copy_enlightened_to_vmcs12()
1601 struct hv_enlightened_vmcs *evmcs = vmx->nested.hv_evmcs; in copy_enlightened_to_vmcs12()
1816 struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; in copy_vmcs12_to_enlightened()
1817 struct hv_enlightened_vmcs *evmcs = vmx->nested.hv_evmcs; in copy_vmcs12_to_enlightened()
1988 if (likely(!vmx->nested.enlightened_vmcs_enabled)) in nested_vmx_handle_enlightened_vmptrld()
1996 if (unlikely(evmcs_gpa != vmx->nested.hv_evmcs_vmptr)) { in nested_vmx_handle_enlightened_vmptrld()
1997 vmx->nested.current_vmptr = -1ull; in nested_vmx_handle_enlightened_vmptrld()
2002 &vmx->nested.hv_evmcs_map)) in nested_vmx_handle_enlightened_vmptrld()
2005 vmx->nested.hv_evmcs = vmx->nested.hv_evmcs_map.hva; in nested_vmx_handle_enlightened_vmptrld()
2029 if ((vmx->nested.hv_evmcs->revision_id != KVM_EVMCS_VERSION) && in nested_vmx_handle_enlightened_vmptrld()
2030 (vmx->nested.hv_evmcs->revision_id != VMCS12_REVISION)) { in nested_vmx_handle_enlightened_vmptrld()
2035 vmx->nested.hv_evmcs_vmptr = evmcs_gpa; in nested_vmx_handle_enlightened_vmptrld()
2057 vmx->nested.hv_evmcs->hv_clean_fields &= in nested_vmx_handle_enlightened_vmptrld()
2067 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in nested_sync_vmcs12_to_shadow()
2072 vmx->nested.need_vmcs12_to_shadow_sync = false; in nested_sync_vmcs12_to_shadow()
2078 container_of(timer, struct vcpu_vmx, nested.preemption_timer); in vmx_preemption_timer_fn()
2080 vmx->nested.preemption_timer_expired = true; in vmx_preemption_timer_fn()
2095 if (!vmx->nested.has_preemption_timer_deadline) { in vmx_calc_preemption_timer_value()
2096 vmx->nested.preemption_timer_deadline = in vmx_calc_preemption_timer_value()
2098 vmx->nested.has_preemption_timer_deadline = true; in vmx_calc_preemption_timer_value()
2100 return vmx->nested.preemption_timer_deadline - l1_scaled_tsc; in vmx_calc_preemption_timer_value()
2113 vmx_preemption_timer_fn(&vmx->nested.preemption_timer); in vmx_start_preemption_timer()
2123 hrtimer_start(&vmx->nested.preemption_timer, in vmx_start_preemption_timer()
2130 if (vmx->nested.nested_run_pending && in nested_vmx_calc_efer()
2147 if (vmx->nested.vmcs02_initialized) in prepare_vmcs02_constant_state()
2149 vmx->nested.vmcs02_initialized = true; in prepare_vmcs02_constant_state()
2168 vmcs_write64(MSR_BITMAP, __pa(vmx->nested.vmcs02.msr_bitmap)); in prepare_vmcs02_constant_state()
2203 if (nested_cpu_has_vpid(vmcs12) && vmx->nested.vpid02) in prepare_vmcs02_early_rare()
2204 vmcs_write16(VIRTUAL_PROCESSOR_ID, vmx->nested.vpid02); in prepare_vmcs02_early_rare()
2216 if (vmx->nested.dirty_vmcs12 || evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in prepare_vmcs02_early()
2227 vmx->nested.pi_pending = false; in prepare_vmcs02_early()
2229 vmx->nested.posted_intr_nv = vmcs12->posted_intr_nv; in prepare_vmcs02_early()
2243 vmx->nested.l1_tpr_threshold = -1; in prepare_vmcs02_early()
2356 if (vmx->nested.nested_run_pending) { in prepare_vmcs02_early()
2374 struct hv_enlightened_vmcs *hv_evmcs = vmx->nested.hv_evmcs; in prepare_vmcs02_rare()
2437 if (kvm_mpx_supported() && vmx->nested.nested_run_pending && in prepare_vmcs02_rare()
2508 if (vmx->nested.dirty_vmcs12 || evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) { in prepare_vmcs02()
2510 vmx->nested.dirty_vmcs12 = false; in prepare_vmcs02()
2512 load_guest_pdptrs_vmcs12 = !evmptr_is_valid(vmx->nested.hv_evmcs_vmptr) || in prepare_vmcs02()
2513 !(vmx->nested.hv_evmcs->hv_clean_fields & in prepare_vmcs02()
2517 if (vmx->nested.nested_run_pending && in prepare_vmcs02()
2523 vmcs_write64(GUEST_IA32_DEBUGCTL, vmx->nested.vmcs01_debugctl); in prepare_vmcs02()
2525 if (kvm_mpx_supported() && (!vmx->nested.nested_run_pending || in prepare_vmcs02()
2527 vmcs_write64(GUEST_BNDCFGS, vmx->nested.vmcs01_guest_bndcfgs); in prepare_vmcs02()
2538 if (vmx->nested.nested_run_pending && in prepare_vmcs02()
2638 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in prepare_vmcs02()
2639 vmx->nested.hv_evmcs->hv_clean_fields |= in prepare_vmcs02()
2665 if (CC(!(vmx->nested.msrs.ept_caps & VMX_EPTP_UC_BIT))) in nested_vmx_check_eptp()
2669 if (CC(!(vmx->nested.msrs.ept_caps & VMX_EPTP_WB_BIT))) in nested_vmx_check_eptp()
2679 if (CC(!(vmx->nested.msrs.ept_caps & VMX_EPT_PAGE_WALK_5_BIT))) in nested_vmx_check_eptp()
2683 if (CC(!(vmx->nested.msrs.ept_caps & VMX_EPT_PAGE_WALK_4_BIT))) in nested_vmx_check_eptp()
2696 if (CC(!(vmx->nested.msrs.ept_caps & VMX_EPT_AD_BIT))) in nested_vmx_check_eptp()
2712 vmx->nested.msrs.pinbased_ctls_low, in nested_check_vm_execution_controls()
2713 vmx->nested.msrs.pinbased_ctls_high)) || in nested_check_vm_execution_controls()
2715 vmx->nested.msrs.procbased_ctls_low, in nested_check_vm_execution_controls()
2716 vmx->nested.msrs.procbased_ctls_high))) in nested_check_vm_execution_controls()
2721 vmx->nested.msrs.secondary_ctls_low, in nested_check_vm_execution_controls()
2722 vmx->nested.msrs.secondary_ctls_high))) in nested_check_vm_execution_controls()
2749 ~vmx->nested.msrs.vmfunc_controls)) in nested_check_vm_execution_controls()
2771 vmx->nested.msrs.exit_ctls_low, in nested_check_vm_exit_controls()
2772 vmx->nested.msrs.exit_ctls_high)) || in nested_check_vm_exit_controls()
2788 vmx->nested.msrs.entry_ctls_low, in nested_check_vm_entry_controls()
2789 vmx->nested.msrs.entry_ctls_high))) in nested_check_vm_entry_controls()
2862 if (to_vmx(vcpu)->nested.enlightened_vmcs_enabled) in nested_vmx_check_controls()
3023 if (to_vmx(vcpu)->nested.nested_run_pending && in nested_vmx_check_guest_state()
3131 if (vmx->nested.enlightened_vmcs_enabled && in nested_get_evmcs_page()
3132 vmx->nested.hv_evmcs_vmptr == EVMPTR_MAP_PENDING) { in nested_get_evmcs_page()
3144 vmx->nested.need_vmcs12_to_shadow_sync = true; in nested_get_evmcs_page()
3177 if (vmx->nested.apic_access_page) { /* shouldn't happen */ in nested_get_vmcs12_pages()
3178 kvm_release_page_clean(vmx->nested.apic_access_page); in nested_get_vmcs12_pages()
3179 vmx->nested.apic_access_page = NULL; in nested_get_vmcs12_pages()
3183 vmx->nested.apic_access_page = page; in nested_get_vmcs12_pages()
3184 hpa = page_to_phys(vmx->nested.apic_access_page); in nested_get_vmcs12_pages()
3198 map = &vmx->nested.virtual_apic_map; in nested_get_vmcs12_pages()
3224 map = &vmx->nested.pi_desc_map; in nested_get_vmcs12_pages()
3227 vmx->nested.pi_desc = in nested_get_vmcs12_pages()
3239 vmx->nested.pi_desc = NULL; in nested_get_vmcs12_pages()
3279 if (WARN_ON_ONCE(vmx->nested.pml_full)) in nested_vmx_write_pml_buffer()
3291 vmx->nested.pml_full = true; in nested_vmx_write_pml_buffer()
3316 if (!to_vmx(vcpu)->nested.vmxon) { in nested_vmx_check_permission()
3372 vmx->nested.vmcs01_debugctl = vmcs_read64(GUEST_IA32_DEBUGCTL); in nested_vmx_enter_non_root_mode()
3375 vmx->nested.vmcs01_guest_bndcfgs = vmcs_read64(GUEST_BNDCFGS); in nested_vmx_enter_non_root_mode()
3396 vmx_switch_vmcs(vcpu, &vmx->nested.vmcs02); in nested_vmx_enter_non_root_mode()
3469 vmx->nested.preemption_timer_expired = false; in nested_vmx_enter_non_root_mode()
3501 if (enable_shadow_vmcs || evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in nested_vmx_enter_non_root_mode()
3502 vmx->nested.need_vmcs12_to_shadow_sync = true; in nested_vmx_enter_non_root_mode()
3529 if (CC(!evmptr_is_valid(vmx->nested.hv_evmcs_vmptr) && in nested_vmx_run()
3530 vmx->nested.current_vmptr == -1ull)) in nested_vmx_run()
3544 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) { in nested_vmx_run()
3545 copy_enlightened_to_vmcs12(vmx, vmx->nested.hv_evmcs->hv_clean_fields); in nested_vmx_run()
3580 vmx->nested.nested_run_pending = 1; in nested_vmx_run()
3581 vmx->nested.has_preemption_timer_deadline = false; in nested_vmx_run()
3588 kvm_apic_has_interrupt(vcpu) == vmx->nested.posted_intr_nv) { in nested_vmx_run()
3589 vmx->nested.pi_pending = true; in nested_vmx_run()
3591 kvm_apic_clear_irr(vcpu, vmx->nested.posted_intr_nv); in nested_vmx_run()
3620 vmx->nested.nested_run_pending = 0; in nested_vmx_run()
3625 vmx->nested.nested_run_pending = 0; in nested_vmx_run()
3635 vmx->nested.nested_run_pending = 0; in nested_vmx_run()
3752 if (!vmx->nested.pi_pending) in vmx_complete_nested_posted_interrupt()
3755 if (!vmx->nested.pi_desc) in vmx_complete_nested_posted_interrupt()
3758 vmx->nested.pi_pending = false; in vmx_complete_nested_posted_interrupt()
3760 if (!pi_test_and_clear_on(vmx->nested.pi_desc)) in vmx_complete_nested_posted_interrupt()
3763 max_irr = find_last_bit((unsigned long *)vmx->nested.pi_desc->pir, 256); in vmx_complete_nested_posted_interrupt()
3765 vapic_page = vmx->nested.virtual_apic_map.hva; in vmx_complete_nested_posted_interrupt()
3769 __kvm_apic_update_irr(vmx->nested.pi_desc->pir, in vmx_complete_nested_posted_interrupt()
3841 to_vmx(vcpu)->nested.preemption_timer_expired; in nested_vmx_preemption_timer_pending()
3849 vmx->nested.nested_run_pending || kvm_event_needs_reinjection(vcpu); in vmx_check_nested_events()
3850 bool mtf_pending = vmx->nested.mtf_pending; in vmx_check_nested_events()
3858 vmx->nested.mtf_pending = false; in vmx_check_nested_events()
3893 if (vmx->nested.nested_run_pending) in vmx_check_nested_events()
3910 if (vmx->nested.nested_run_pending) in vmx_check_nested_events()
3965 hrtimer_get_remaining(&to_vmx(vcpu)->nested.preemption_timer); in vmx_get_preemption_timer_value()
4067 vmx->nested.need_sync_vmcs02_to_vmcs12_rare = false; in sync_vmcs02_to_vmcs12_rare()
4076 if (!vmx->nested.need_sync_vmcs02_to_vmcs12_rare) in copy_vmcs02_to_vmcs12_rare()
4083 vmx->loaded_vmcs = &vmx->nested.vmcs02; in copy_vmcs02_to_vmcs12_rare()
4089 vmx_vcpu_load_vmcs(vcpu, cpu, &vmx->nested.vmcs02); in copy_vmcs02_to_vmcs12_rare()
4103 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in sync_vmcs02_to_vmcs12()
4106 vmx->nested.need_sync_vmcs02_to_vmcs12_rare = in sync_vmcs02_to_vmcs12()
4107 !evmptr_is_valid(vmx->nested.hv_evmcs_vmptr); in sync_vmcs02_to_vmcs12()
4131 !vmx->nested.nested_run_pending) in sync_vmcs02_to_vmcs12()
4503 WARN_ON_ONCE(vmx->nested.nested_run_pending); in nested_vmx_vmexit()
4533 hrtimer_cancel(&to_vmx(vcpu)->nested.preemption_timer); in nested_vmx_vmexit()
4579 if (vmx->nested.l1_tpr_threshold != -1) in nested_vmx_vmexit()
4580 vmcs_write32(TPR_THRESHOLD, vmx->nested.l1_tpr_threshold); in nested_vmx_vmexit()
4582 if (vmx->nested.change_vmcs01_virtual_apic_mode) { in nested_vmx_vmexit()
4583 vmx->nested.change_vmcs01_virtual_apic_mode = false; in nested_vmx_vmexit()
4587 if (vmx->nested.update_vmcs01_cpu_dirty_logging) { in nested_vmx_vmexit()
4588 vmx->nested.update_vmcs01_cpu_dirty_logging = false; in nested_vmx_vmexit()
4593 if (vmx->nested.apic_access_page) { in nested_vmx_vmexit()
4594 kvm_release_page_clean(vmx->nested.apic_access_page); in nested_vmx_vmexit()
4595 vmx->nested.apic_access_page = NULL; in nested_vmx_vmexit()
4597 kvm_vcpu_unmap(vcpu, &vmx->nested.virtual_apic_map, true); in nested_vmx_vmexit()
4598 kvm_vcpu_unmap(vcpu, &vmx->nested.pi_desc_map, true); in nested_vmx_vmexit()
4599 vmx->nested.pi_desc = NULL; in nested_vmx_vmexit()
4601 if (vmx->nested.reload_vmcs01_apic_access_page) { in nested_vmx_vmexit()
4602 vmx->nested.reload_vmcs01_apic_access_page = false; in nested_vmx_vmexit()
4607 (enable_shadow_vmcs || evmptr_is_valid(vmx->nested.hv_evmcs_vmptr))) in nested_vmx_vmexit()
4608 vmx->nested.need_vmcs12_to_shadow_sync = true; in nested_vmx_vmexit()
4799 vmx->nested.msrs.entry_ctls_high |= in nested_vmx_pmu_entry_exit_ctls_update()
4801 vmx->nested.msrs.exit_ctls_high |= in nested_vmx_pmu_entry_exit_ctls_update()
4804 vmx->nested.msrs.entry_ctls_high &= in nested_vmx_pmu_entry_exit_ctls_update()
4806 vmx->nested.msrs.exit_ctls_high &= in nested_vmx_pmu_entry_exit_ctls_update()
4865 r = alloc_loaded_vmcs(&vmx->nested.vmcs02); in enter_vmx_operation()
4869 vmx->nested.cached_vmcs12 = kzalloc(VMCS12_SIZE, GFP_KERNEL_ACCOUNT); in enter_vmx_operation()
4870 if (!vmx->nested.cached_vmcs12) in enter_vmx_operation()
4873 vmx->nested.cached_shadow_vmcs12 = kzalloc(VMCS12_SIZE, GFP_KERNEL_ACCOUNT); in enter_vmx_operation()
4874 if (!vmx->nested.cached_shadow_vmcs12) in enter_vmx_operation()
4880 hrtimer_init(&vmx->nested.preemption_timer, CLOCK_MONOTONIC, in enter_vmx_operation()
4882 vmx->nested.preemption_timer.function = vmx_preemption_timer_fn; in enter_vmx_operation()
4884 vmx->nested.vpid02 = allocate_vpid(); in enter_vmx_operation()
4886 vmx->nested.vmcs02_initialized = false; in enter_vmx_operation()
4887 vmx->nested.vmxon = true; in enter_vmx_operation()
4897 kfree(vmx->nested.cached_shadow_vmcs12); in enter_vmx_operation()
4900 kfree(vmx->nested.cached_vmcs12); in enter_vmx_operation()
4903 free_loaded_vmcs(&vmx->nested.vmcs02); in enter_vmx_operation()
4939 if (vmx->nested.vmxon) in handle_vmon()
4966 vmx->nested.vmxon_ptr = vmptr; in handle_vmon()
4978 if (vmx->nested.current_vmptr == -1ull) in nested_release_vmcs12()
4989 vmx->nested.posted_intr_nv = -1; in nested_release_vmcs12()
4993 vmx->nested.current_vmptr >> PAGE_SHIFT, in nested_release_vmcs12()
4994 vmx->nested.cached_vmcs12, 0, VMCS12_SIZE); in nested_release_vmcs12()
4998 vmx->nested.current_vmptr = -1ull; in nested_release_vmcs12()
5033 if (vmptr == vmx->nested.vmxon_ptr) in handle_vmclear()
5046 if (likely(!vmx->nested.enlightened_vmcs_enabled || in handle_vmclear()
5048 if (vmptr == vmx->nested.current_vmptr) in handle_vmclear()
5055 } else if (vmx->nested.hv_evmcs && vmptr == vmx->nested.hv_evmcs_vmptr) { in handle_vmclear()
5096 if (vmx->nested.current_vmptr == -1ull || in handle_vmread()
5188 if (vmx->nested.current_vmptr == -1ull || in handle_vmwrite()
5260 vmx->nested.dirty_vmcs12 = true; in handle_vmwrite()
5268 vmx->nested.current_vmptr = vmptr; in set_current_vmptr()
5273 vmx->nested.need_vmcs12_to_shadow_sync = true; in set_current_vmptr()
5275 vmx->nested.dirty_vmcs12 = true; in set_current_vmptr()
5294 if (vmptr == vmx->nested.vmxon_ptr) in handle_vmptrld()
5298 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in handle_vmptrld()
5301 if (vmx->nested.current_vmptr != vmptr) { in handle_vmptrld()
5332 memcpy(vmx->nested.cached_vmcs12, new_vmcs12, VMCS12_SIZE); in handle_vmptrld()
5346 gpa_t current_vmptr = to_vmx(vcpu)->nested.current_vmptr; in handle_vmptrst()
5354 if (unlikely(evmptr_is_valid(to_vmx(vcpu)->nested.hv_evmcs_vmptr))) in handle_vmptrst()
5383 if (!(vmx->nested.msrs.secondary_ctls_high & in handle_invept()
5385 !(vmx->nested.msrs.ept_caps & VMX_EPT_INVEPT_BIT)) { in handle_invept()
5396 types = (vmx->nested.msrs.ept_caps >> VMX_EPT_EXTENT_SHIFT) & 6; in handle_invept()
5463 if (!(vmx->nested.msrs.secondary_ctls_high & in handle_invvpid()
5465 !(vmx->nested.msrs.vpid_caps & VMX_VPID_INVVPID_BIT)) { in handle_invvpid()
5476 types = (vmx->nested.msrs.vpid_caps & in handle_invvpid()
6053 WARN_ON_ONCE(vmx->nested.nested_run_pending); in nested_vmx_reflect_vmexit()
6123 (vmx->nested.vmxon || vmx->nested.smm.vmxon)) { in vmx_get_nested_state()
6124 kvm_state.hdr.vmx.vmxon_pa = vmx->nested.vmxon_ptr; in vmx_get_nested_state()
6125 kvm_state.hdr.vmx.vmcs12_pa = vmx->nested.current_vmptr; in vmx_get_nested_state()
6131 if (vmx->nested.hv_evmcs_vmptr != EVMPTR_INVALID) in vmx_get_nested_state()
6140 if (vmx->nested.smm.vmxon) in vmx_get_nested_state()
6143 if (vmx->nested.smm.guest_mode) in vmx_get_nested_state()
6149 if (vmx->nested.nested_run_pending) in vmx_get_nested_state()
6152 if (vmx->nested.mtf_pending) in vmx_get_nested_state()
6156 vmx->nested.has_preemption_timer_deadline) { in vmx_get_nested_state()
6160 vmx->nested.preemption_timer_deadline; in vmx_get_nested_state()
6186 if (!vmx->nested.need_vmcs12_to_shadow_sync) { in vmx_get_nested_state()
6187 if (evmptr_is_valid(vmx->nested.hv_evmcs_vmptr)) in vmx_get_nested_state()
6227 to_vmx(vcpu)->nested.nested_run_pending = 0; in vmx_leave_nested()
6300 (!nested_vmx_allowed(vcpu) || !vmx->nested.enlightened_vmcs_enabled)) in vmx_set_nested_state()
6308 vmx->nested.vmxon_ptr = kvm_state->hdr.vmx.vmxon_pa; in vmx_set_nested_state()
6337 vmx->nested.hv_evmcs_vmptr = EVMPTR_MAP_PENDING; in vmx_set_nested_state()
6344 vmx->nested.smm.vmxon = true; in vmx_set_nested_state()
6345 vmx->nested.vmxon = false; in vmx_set_nested_state()
6348 vmx->nested.smm.guest_mode = true; in vmx_set_nested_state()
6361 vmx->nested.nested_run_pending = in vmx_set_nested_state()
6364 vmx->nested.mtf_pending = in vmx_set_nested_state()
6389 vmx->nested.has_preemption_timer_deadline = false; in vmx_set_nested_state()
6391 vmx->nested.has_preemption_timer_deadline = true; in vmx_set_nested_state()
6392 vmx->nested.preemption_timer_deadline = in vmx_set_nested_state()
6401 vmx->nested.dirty_vmcs12 = true; in vmx_set_nested_state()
6409 vmx->nested.nested_run_pending = 0; in vmx_set_nested_state()
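
For orientation: every line above touches the per-vCPU bookkeeping reached through vmx->nested, the "nested" member of struct vcpu_vmx (visible in the container_of() call at line 2078). The sketch below is not the kernel's definition; it only collects, with assumed stand-in types and grouping, the field names that actually appear in this listing, as a map for reading the references. The real structure is struct nested_vmx in arch/x86/kvm/vmx/vmx.h and contains more fields than shown here.

/* Hedged sketch of the nested-VMX bookkeeping referenced in the listing.
 * Field names are taken from the lines above; types, ordering, and the
 * stand-in typedefs are assumptions, not the kernel's layout. */
#include <stdbool.h>
#include <stdint.h>

typedef uint64_t u64;   /* stand-in for the kernel's u64 */
typedef u64 gpa_t;      /* stand-in for a guest-physical address */

struct nested_vmx_sketch {
	/* VMXON and current-VMCS tracking */
	bool vmxon;
	gpa_t vmxon_ptr;
	gpa_t current_vmptr;              /* -1ull when no VMCS is loaded */

	/* cached L1 VMCS12 and shadow VMCS12, plus sync flags */
	void *cached_vmcs12;
	void *cached_shadow_vmcs12;
	bool dirty_vmcs12;
	bool need_vmcs12_to_shadow_sync;
	bool need_sync_vmcs02_to_vmcs12_rare;

	/* enlightened VMCS (Hyper-V) state */
	bool enlightened_vmcs_enabled;
	gpa_t hv_evmcs_vmptr;             /* EVMPTR_INVALID / EVMPTR_MAP_PENDING sentinels */
	void *hv_evmcs;                   /* mapped eVMCS; NULL after nested_release_evmcs() */

	/* L2 run state */
	bool nested_run_pending;
	bool vmcs02_initialized;
	bool mtf_pending;
	bool pml_full;

	/* posted interrupts and VMX preemption timer */
	bool pi_pending;
	int posted_intr_nv;               /* assumed width; set to -1 when unused in the listing */
	bool preemption_timer_expired;
	bool has_preemption_timer_deadline;
	u64 preemption_timer_deadline;
};

The teardown paths in the listing, free_nested() (lines 286-319) and nested_release_vmcs12() (lines 4978-4998), show how these fields are cleared when L1 leaves VMX operation.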