Lines matching refs:nested

57 u64 cr3 = svm->nested.ctl.nested_cr3; in nested_svm_get_tdp_pdptr()
72 return svm->nested.ctl.nested_cr3; in nested_svm_get_tdp_cr3()
78 struct vmcb *hsave = svm->nested.hsave; in nested_svm_init_mmu_context()
84 svm->nested.ctl.nested_cr3); in nested_svm_init_mmu_context()
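The four hits above cover nested paging setup: nested_svm_get_tdp_pdptr() and nested_svm_get_tdp_cr3() hand vmcb12's nested_cr3 to the MMU, and nested_svm_init_mmu_context() installs it as the root of the L2 page tables. As a minimal user-space sketch (assuming a PAE-style table layout; the real code reads the entry from guest memory via kvm_vcpu_read_guest_page()), the PDPTE lookup behind line 57 comes down to plain address arithmetic:

    #include <stdint.h>
    #include <stdio.h>

    /* Guest-physical address of PDPTE `index`, with the table base taken
     * from nested_cr3. Illustrative assumption: a PAE-style PDPT, 32-byte
     * aligned, 8 bytes per entry. */
    uint64_t pdpte_gpa(uint64_t nested_cr3, unsigned int index)
    {
        uint64_t pdpt_base = nested_cr3 & ~0x1fULL;
        return pdpt_base + index * 8ULL;
    }

    int main(void)
    {
        /* entry 3 of a table based at 0x12345060 lives at 0x12345078 */
        printf("%#llx\n", (unsigned long long)pdpte_gpa(0x12345060ULL, 3));
        return 0;
    }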
109 h = &svm->nested.hsave->control; in recalc_intercepts()
110 g = &svm->nested.ctl; in recalc_intercepts()
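recalc_intercepts() picks up the L1-visible controls from hsave (`h`) and the guest's requested controls from the vmcb12 cache (`g`) and recomputes the active intercept set. A minimal sketch of the merge, assuming the SVM convention that a set bit means "cause a #VMEXIT", so OR-ing the two vectors preserves every exit either hypervisor asked for:

    #include <stdint.h>

    #define NUM_INTERCEPT_WORDS 5 /* illustrative size, not the kernel constant */

    struct ctl_sketch {
        uint32_t intercepts[NUM_INTERCEPT_WORDS];
    };

    /* L0 keeps every intercept it needs for itself (h) and adds everything
     * L1 requested on behalf of L2 (g). */
    void recalc_intercepts_sketch(struct ctl_sketch *c,
                                  const struct ctl_sketch *h,
                                  const struct ctl_sketch *g)
    {
        for (unsigned int i = 0; i < NUM_INTERCEPT_WORDS; i++)
            c->intercepts[i] = h->intercepts[i] | g->intercepts[i];
    }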
175 if (!(vmcb_is_intercept(&svm->nested.ctl, INTERCEPT_MSR_PROT))) in nested_svm_vmrun_msrpm()
186 offset = svm->nested.ctl.msrpm_base_pa + (p * 4); in nested_svm_vmrun_msrpm()
191 svm->nested.msrpm[p] = svm->msrpm[p] | value; in nested_svm_vmrun_msrpm()
194 svm->vmcb->control.msrpm_base_pa = __sme_set(__pa(svm->nested.msrpm)); in nested_svm_vmrun_msrpm()
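nested_svm_vmrun_msrpm() builds the MSR permission map L2 actually runs with: if L1 enabled MSR protection (line 175), each 32-bit word of L1's map is fetched from guest memory at msrpm_base_pa + p*4 (line 186) and OR-ed into L0's own map (line 191), and the merged map's physical address is installed in the active vmcb (line 194). A sketch of the merge step with the guest-memory read replaced by a plain array, assuming a set bit means "intercept":

    #include <stdint.h>
    #include <stddef.h>

    /* A set bit in an SVM MSR permission map means "intercept", so the
     * merged map traps whenever either L0 or L1 wants the access. */
    void merge_msrpm(uint32_t *merged, const uint32_t *l0_map,
                     const uint32_t *l1_map, size_t words)
    {
        for (size_t p = 0; p < words; p++)
            merged[p] = l0_map[p] | l1_map[p];
    }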
266 copy_vmcb_control_area(&svm->nested.ctl, control); in load_nested_vmcb_control()
269 svm->nested.ctl.asid = control->asid; in load_nested_vmcb_control()
270 svm->nested.ctl.msrpm_base_pa &= ~0x0fffULL; in load_nested_vmcb_control()
271 svm->nested.ctl.iopm_base_pa &= ~0x0fffULL; in load_nested_vmcb_control()
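load_nested_vmcb_control() caches vmcb12's control area (line 266), then sanitizes guest-supplied values: the ASID is copied explicitly (line 269) and the MSR/IO permission-map bases are aligned down to a 4KiB boundary by clearing their low 12 bits (lines 270-271). The masking in isolation:

    #include <stdint.h>
    #include <stdio.h>

    /* Mirror of `base &= ~0x0fffULL`: align a guest-supplied permission-map
     * base down to a 4KiB page boundary. */
    uint64_t page_align_down(uint64_t gpa)
    {
        return gpa & ~0x0fffULL;
    }

    int main(void)
    {
        printf("%#llx\n", (unsigned long long)page_align_down(0x1234567fULL));
        /* prints 0x12345000 */
        return 0;
    }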
281 svm->nested.ctl.event_inj = svm->vmcb->control.event_inj; in sync_nested_vmcb_control()
282 svm->nested.ctl.event_inj_err = svm->vmcb->control.event_inj_err; in sync_nested_vmcb_control()
286 if (!(svm->nested.ctl.int_ctl & V_INTR_MASKING_MASK) && in sync_nested_vmcb_control()
298 svm->nested.ctl.int_ctl &= ~mask; in sync_nested_vmcb_control()
299 svm->nested.ctl.int_ctl |= svm->vmcb->control.int_ctl & mask; in sync_nested_vmcb_control()
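sync_nested_vmcb_control() propagates fields the hardware may have changed back into the cached vmcb12 controls: event-injection state is copied wholesale (lines 281-282), while int_ctl is updated only in the guest-owned bits via a read-modify-write under a mask (lines 298-299). The mask step on its own:

    #include <stdint.h>

    /* Refresh only the masked bits of the cached int_ctl from the live
     * hardware value, leaving every other bit untouched. */
    uint32_t sync_under_mask(uint32_t cached, uint32_t hw, uint32_t mask)
    {
        cached &= ~mask;     /* drop the stale guest-owned bits  */
        cached |= hw & mask; /* refill them from the active vmcb */
        return cached;
    }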
341 return svm->nested.ctl.nested_ctl & SVM_NESTED_CTL_NP_ENABLE; in nested_npt_enabled()
410 svm->vcpu.arch.l1_tsc_offset + svm->nested.ctl.tsc_offset; in nested_prepare_vmcb_control()
413 (svm->nested.ctl.int_ctl & ~mask) | in nested_prepare_vmcb_control()
414 (svm->nested.hsave->control.int_ctl & mask); in nested_prepare_vmcb_control()
416 svm->vmcb->control.virt_ext = svm->nested.ctl.virt_ext; in nested_prepare_vmcb_control()
417 svm->vmcb->control.int_vector = svm->nested.ctl.int_vector; in nested_prepare_vmcb_control()
418 svm->vmcb->control.int_state = svm->nested.ctl.int_state; in nested_prepare_vmcb_control()
419 svm->vmcb->control.event_inj = svm->nested.ctl.event_inj; in nested_prepare_vmcb_control()
420 svm->vmcb->control.event_inj_err = svm->nested.ctl.event_inj_err; in nested_prepare_vmcb_control()
422 svm->vmcb->control.pause_filter_count = svm->nested.ctl.pause_filter_count; in nested_prepare_vmcb_control()
423 svm->vmcb->control.pause_filter_thresh = svm->nested.ctl.pause_filter_thresh; in nested_prepare_vmcb_control()
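nested_prepare_vmcb_control() assembles the control area the CPU will actually run L2 with. Two combination rules are visible above: TSC offsets stack additively (line 410), and int_ctl mixes vmcb12's bits with host-owned bits from hsave under a mask (lines 413-414); the remaining interrupt/event fields (lines 416-423) are taken verbatim from the vmcb12 cache. Both rules as a sketch:

    #include <stdint.h>

    /* L2 runs with L1's TSC offset plus the offset L1 programmed for L2,
     * i.e. l2_tsc = host_tsc + l1_offset + l2_offset. */
    uint64_t combined_tsc_offset(uint64_t l1_offset, uint64_t l2_offset)
    {
        return l1_offset + l2_offset;
    }

    /* int_ctl: guest-controlled bits come from vmcb12, host-owned bits
     * come from the hsave copy. */
    uint32_t combined_int_ctl(uint32_t guest, uint32_t host, uint32_t mask)
    {
        return (guest & ~mask) | (host & mask);
    }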
442 svm->nested.vmcb12_gpa = vmcb12_gpa; in enter_svm_guest_mode()
461 struct vmcb *hsave = svm->nested.hsave; in nested_svm_vmrun()
484 if (WARN_ON_ONCE(!svm->nested.initialized)) in nested_svm_vmrun()
536 svm->nested.nested_run_pending = 1; in nested_svm_vmrun()
545 svm->nested.nested_run_pending = 0; in nested_svm_vmrun()
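nested_svm_vmrun() sets nested_run_pending just before entering L2 (line 536) and clears it on the failure path (line 545); svm_check_nested_events() at line 972 consults the same flag so that no events are injected across a not-yet-completed VMRUN. A stripped-down sketch of that flag discipline (the mapping and consistency checks of the real function are elided, and the helper names are hypothetical):

    #include <stdbool.h>

    struct nested_sketch {
        bool nested_run_pending;
    };

    /* Hypothetical helpers showing only the flag's lifecycle. */
    void on_vmrun(struct nested_sketch *n)
    {
        n->nested_run_pending = true;  /* set before entering L2 (line 536) */
    }

    void on_vmrun_failed(struct nested_sketch *n)
    {
        n->nested_run_pending = false; /* cleared if entry aborts (line 545) */
    }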
580 struct vmcb *hsave = svm->nested.hsave; in nested_svm_vmexit()
584 rc = kvm_vcpu_map(&svm->vcpu, gpa_to_gfn(svm->nested.vmcb12_gpa), &map); in nested_svm_vmexit()
595 svm->nested.vmcb12_gpa = 0; in nested_svm_vmexit()
596 WARN_ON_ONCE(svm->nested.nested_run_pending); in nested_svm_vmexit()
634 vmcb12->control.int_ctl = svm->nested.ctl.int_ctl; in nested_svm_vmexit()
635 vmcb12->control.tlb_ctl = svm->nested.ctl.tlb_ctl; in nested_svm_vmexit()
636 vmcb12->control.event_inj = svm->nested.ctl.event_inj; in nested_svm_vmexit()
637 vmcb12->control.event_inj_err = svm->nested.ctl.event_inj_err; in nested_svm_vmexit()
653 svm->nested.ctl.nested_cr3 = 0; in nested_svm_vmexit()
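nested_svm_vmexit() maps vmcb12 by its guest-physical address (line 584), clears the cached GPA and sanity-checks that no run is pending (lines 595-596), copies interrupt and event-injection state back into vmcb12 from the cache (lines 634-637), and finally drops nested_cr3 (line 653). Assuming 4KiB pages, the gpa_to_gfn() conversion used for the mapping is just a page-frame shift:

    #include <stdint.h>
    #include <stdio.h>

    /* Assuming 4KiB pages, gpa_to_gfn() is a shift by the page order. */
    uint64_t gpa_to_gfn_sketch(uint64_t gpa)
    {
        return gpa >> 12;
    }

    int main(void)
    {
        printf("%#llx\n", (unsigned long long)gpa_to_gfn_sketch(0xdead1000ULL));
        /* prints 0xdead1 */
        return 0;
    }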
708 if (svm->nested.initialized) in svm_allocate_nested()
714 svm->nested.hsave = page_address(hsave_page); in svm_allocate_nested()
716 svm->nested.msrpm = svm_vcpu_alloc_msrpm(); in svm_allocate_nested()
717 if (!svm->nested.msrpm) in svm_allocate_nested()
719 svm_vcpu_init_msrpm(&svm->vcpu, svm->nested.msrpm); in svm_allocate_nested()
721 svm->nested.initialized = true; in svm_allocate_nested()
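svm_allocate_nested() is idempotent (line 708) and only marks the vCPU initialized (line 721) once both the hsave page and the nested MSR permission map have been allocated; the matching svm_free_nested() below releases them in reverse. A sketch of the same acquire-all-or-roll-back pattern with plain allocations and placeholder sizes:

    #include <stdbool.h>
    #include <stdlib.h>

    struct nested_alloc_sketch {
        void *hsave;
        void *msrpm;
        bool  initialized;
    };

    /* Acquire both resources or none; sizes are placeholders, not the
     * kernel's actual allocation sizes. */
    int allocate_nested_sketch(struct nested_alloc_sketch *n)
    {
        if (n->initialized)
            return 0;

        n->hsave = calloc(1, 4096);
        if (!n->hsave)
            return -1;

        n->msrpm = calloc(1, 2 * 4096);
        if (!n->msrpm) {
            free(n->hsave);      /* roll back the first allocation */
            n->hsave = NULL;
            return -1;
        }

        n->initialized = true;   /* only set once everything succeeded */
        return 0;
    }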
731 if (!svm->nested.initialized) in svm_free_nested()
734 svm_vcpu_free_msrpm(svm->nested.msrpm); in svm_free_nested()
735 svm->nested.msrpm = NULL; in svm_free_nested()
737 __free_page(virt_to_page(svm->nested.hsave)); in svm_free_nested()
738 svm->nested.hsave = NULL; in svm_free_nested()
740 svm->nested.initialized = false; in svm_free_nested()
749 struct vmcb *hsave = svm->nested.hsave; in svm_leave_nested()
752 svm->nested.nested_run_pending = 0; in svm_leave_nested()
766 if (!(vmcb_is_intercept(&svm->nested.ctl, INTERCEPT_MSR_PROT))) in nested_svm_exit_handled_msr()
780 if (kvm_vcpu_read_guest(&svm->vcpu, svm->nested.ctl.msrpm_base_pa + offset, &value, 4)) in nested_svm_exit_handled_msr()
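nested_svm_exit_handled_msr() forwards an intercepted MSR access to L1 only if L1's own permission map flags it: the relevant 4-byte word is read from guest memory at msrpm_base_pa + offset (line 780) and a single bit is tested. Assuming the SVM layout of two adjacent bits per MSR, read then write, within each word (the range-to-offset mapping of the full map is elided):

    #include <stdbool.h>
    #include <stdint.h>

    /* Within a 32-bit msrpm word covering 16 MSRs, each MSR owns two
     * adjacent bits: 2*(msr & 0xf) for reads, the next bit for writes. */
    bool msr_intercepted_sketch(uint32_t msrpm_word, uint32_t msr, bool write)
    {
        uint32_t bit = 2 * (msr & 0xf) + (write ? 1 : 0);
        return (msrpm_word >> bit) & 1;
    }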
793 if (!(vmcb_is_intercept(&svm->nested.ctl, INTERCEPT_IOIO_PROT))) in nested_svm_intercept_ioio()
799 gpa = svm->nested.ctl.iopm_base_pa + (port / 8); in nested_svm_intercept_ioio()
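nested_svm_intercept_ioio() does the analogous lookup in the IO permission map, where each port owns one bit: the byte holding port N sits at iopm_base_pa + N/8 (line 799) and the bit index within it is N mod 8 (a multi-byte access tests one bit per accessed byte). The address/bit split as a sketch:

    #include <stdint.h>

    struct iopm_pos {
        uint64_t gpa;          /* guest-physical address of the byte */
        unsigned int bit;      /* bit index of the port within it    */
    };

    /* One intercept bit per I/O port: byte port/8, bit port%8. */
    struct iopm_pos iopm_locate(uint64_t iopm_base_pa, uint16_t port)
    {
        struct iopm_pos pos = {
            .gpa = iopm_base_pa + port / 8,
            .bit = port % 8,
        };
        return pos;
    }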
824 if (vmcb_is_intercept(&svm->nested.ctl, exit_code)) in nested_svm_intercept()
829 if (vmcb_is_intercept(&svm->nested.ctl, exit_code)) in nested_svm_intercept()
847 if (vmcb_is_intercept(&svm->nested.ctl, exit_code)) in nested_svm_intercept()
887 return (svm->nested.ctl.intercepts[INTERCEPT_EXCEPTION] & BIT(nr)); in nested_exit_on_exception()
955 return vmcb_is_intercept(&svm->nested.ctl, INTERCEPT_INIT); in nested_exit_on_init()
972 kvm_event_needs_reinjection(vcpu) || svm->nested.nested_run_pending; in svm_check_nested_events()
1075 kvm_state.hdr.svm.vmcb_pa = svm->nested.vmcb12_gpa; in svm_get_nested_state()
1079 if (svm->nested.nested_run_pending) in svm_get_nested_state()
1098 if (copy_to_user(&user_vmcb->control, &svm->nested.ctl, in svm_get_nested_state()
1101 if (copy_to_user(&user_vmcb->save, &svm->nested.hsave->save, in svm_get_nested_state()
1114 struct vmcb *hsave = svm->nested.hsave; in svm_set_nested_state()
1199 svm->nested.vmcb12_gpa = kvm_state->hdr.svm.vmcb_pa; in svm_set_nested_state()
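svm_get_nested_state() and svm_set_nested_state() round-trip the nested state through userspace: the vmcb12 GPA travels in the header (lines 1075 and 1199), a run-pending flag is recorded (line 1079), and the cached control area plus the hsave save area are copied out (lines 1098-1101). A sketch of the save-side packing, using a hypothetical flattened struct in place of the uapi struct kvm_nested_state:

    #include <stdbool.h>
    #include <stdint.h>

    #define RUN_PENDING_FLAG (1u << 0) /* stand-in for KVM_STATE_NESTED_RUN_PENDING */

    /* Hypothetical flattened snapshot; the real code fills struct
     * kvm_nested_state and copies the vmcb areas to userspace separately. */
    struct nested_snapshot {
        uint64_t vmcb12_gpa;
        uint32_t flags;
    };

    void pack_nested_state(struct nested_snapshot *s, uint64_t vmcb12_gpa,
                           bool run_pending)
    {
        s->vmcb12_gpa = vmcb12_gpa;
        s->flags = run_pending ? RUN_PENDING_FLAG : 0;
    }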