Searched refs:vmxon (Results 1 – 5 of 5) sorted by relevance
  81  vmx->vmxon = (void *)vm_vaddr_alloc_page(vm);  in vcpu_alloc_vmx()
  82  vmx->vmxon_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmxon);  in vcpu_alloc_vmx()
  83  vmx->vmxon_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmxon);  in vcpu_alloc_vmx()
 153  *(uint32_t *)(vmx->vmxon) = vmcs_revision();  in prepare_for_vmx_operation()
 154  if (vmxon(vmx->vmxon_gpa))  in prepare_for_vmx_operation()
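
These hits are the KVM selftest library's VMXON bookkeeping: vcpu_alloc_vmx() allocates one guest page for the VMXON region and records it three ways (guest-virtual pointer, host-virtual alias, guest-physical address), and prepare_for_vmx_operation() seeds that page with the VMCS revision identifier before executing VMXON on its guest-physical address. A minimal sketch of the seeding step, assuming ring-0 guest context with CR4.VMXE already set; read_msr() and init_vmxon_region() are illustrative names, not the selftest's helpers:

#include <stdint.h>

#define MSR_IA32_VMX_BASIC 0x480  /* architectural MSR number */

static inline uint64_t read_msr(uint32_t msr)
{
        uint32_t lo, hi;

        __asm__ __volatile__("rdmsr" : "=a"(lo), "=d"(hi) : "c"(msr));
        return lo | ((uint64_t)hi << 32);
}

/*
 * VMXON requires the first 32-bit word of the zeroed, 4 KiB-aligned
 * region to hold the VMCS revision identifier, reported in bits 30:0 of
 * IA32_VMX_BASIC; this mirrors the vmcs_revision() write at line 153.
 */
static void init_vmxon_region(void *region)
{
        *(uint32_t *)region = (uint32_t)(read_msr(MSR_IA32_VMX_BASIC) & 0x7fffffff);
}
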
 297  static inline int vmxon(uint64_t phys)  in vmxon() function
 503  void *vmxon;  member
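
The first hit is the declaration of the selftest's thin wrapper around the VMXON instruction; the second is the void *vmxon pointer in the selftest's per-vCPU VMX page-tracking structure. A plausible shape for such a wrapper, returning non-zero on failure (a hedged sketch, not quoted from the header):

#include <stdint.h>

/*
 * Execute VMXON on the region's guest-physical address. SETNA captures
 * both failure flavours, since VMfailInvalid sets CF and VMfailValid
 * sets ZF.
 */
static inline int vmxon(uint64_t phys)
{
        uint8_t ret;

        __asm__ __volatile__("vmxon %[pa]; setna %[ret]"
                             : [ret]"=rm"(ret)
                             : [pa]"m"(phys)
                             : "cc", "memory");

        return ret;
}
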
 118  bool vmxon;  member
 239  bool vmxon;  member
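
These two bool vmxon members sit in KVM's per-vCPU nested-VMX state: one records whether L1 has executed VMXON (i.e. is in VMX operation), the other, inside an smm sub-structure, preserves that fact across SMM transitions. A trimmed stand-in for that layout, with everything unrelated elided:

#include <stdbool.h>

struct nested_vmx_model {
        /* Has L1 executed VMXON, i.e. is it in VMX operation? */
        bool vmxon;

        struct {
                /* vmxon state parked while the vCPU is in SMM */
                bool vmxon;
        } smm;
};
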
 301  if (!vmx->nested.vmxon && !vmx->nested.smm.vmxon)  in free_nested()
 306  vmx->nested.vmxon = false;  in free_nested()
 307  vmx->nested.smm.vmxon = false;  in free_nested()
1376  if (vmx->nested.vmxon)  in vmx_set_vmx_msr()
3351  if (!to_vmx(vcpu)->nested.vmxon) {  in nested_vmx_check_permission()
5120  vmx->nested.vmxon = true;  in enter_vmx_operation()
5188  if (vmx->nested.vmxon)  in handle_vmxon()
6430  (vmx->nested.vmxon || vmx->nested.smm.vmxon)) {  in vmx_get_nested_state()
6447  if (vmx->nested.smm.vmxon)  in vmx_get_nested_state()
6649  vmx->nested.smm.vmxon = true;  in vmx_set_nested_state()
[all …]
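
The nested.c hits trace that flag's lifecycle: handle_vmxon()/enter_vmx_operation() set it, nested_vmx_check_permission() rejects every other VMX instruction while it is clear, free_nested() clears both copies, and the nested-state ioctls serialize it. A rough model of that lifecycle, reusing the trimmed struct from the sketch above (stand-in names, not KVM's API):

/* handle_vmxon(): VMXON while already in VMX operation must fail rather
 * than nest; on success, enter_vmx_operation() records the new state. */
static bool model_vmxon(struct nested_vmx_model *n)
{
        if (n->vmxon)
                return false;
        n->vmxon = true;
        return true;
}

/* nested_vmx_check_permission(): the other VMX instructions are legal
 * only after VMXON; otherwise KVM injects #UD into the guest. */
static bool model_vmx_insn_allowed(const struct nested_vmx_model *n)
{
        return n->vmxon;
}

/* free_nested(): tearing down nested state clears the live flag and the
 * copy saved for SMM. */
static void model_free_nested(struct nested_vmx_model *n)
{
        n->vmxon = false;
        n->smm.vmxon = false;
}
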
2367  vmx->nested.vmxon)  in vmx_set_msr()
3265  if (to_vmx(vcpu)->nested.vmxon)  in vmx_is_valid_cr0()
3421  if (to_vmx(vcpu)->nested.vmxon && !nested_cr4_valid(vcpu, cr4))  in vmx_is_valid_cr4()
8136  vmx->nested.smm.vmxon = vmx->nested.vmxon;  in vmx_enter_smm()
8137  vmx->nested.vmxon = false;  in vmx_enter_smm()
8147  if (vmx->nested.smm.vmxon) {  in vmx_leave_smm()
8148  vmx->nested.vmxon = true;  in vmx_leave_smm()
8149  vmx->nested.smm.vmxon = false;  in vmx_leave_smm()
8171  return to_vmx(vcpu)->nested.vmxon && !is_guest_mode(vcpu);  in vmx_apic_init_signal_blocked()
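
In vmx.c the same flag gates MSR writes and the CR0/CR4 validity checks while L1 is in VMX operation, blocks INIT delivery in vmx_apic_init_signal_blocked(), and is shuffled across SMM transitions: vmx_enter_smm() parks it in smm.vmxon and vmx_leave_smm() restores it. Continuing the model above, the SMM motion looks roughly like this:

/* vmx_enter_smm(): the vCPU is treated as outside VMX operation while
 * in SMM, so remember whether L1 had done VMXON and clear the live flag. */
static void model_enter_smm(struct nested_vmx_model *n)
{
        n->smm.vmxon = n->vmxon;
        n->vmxon = false;
}

/* vmx_leave_smm(): on RSM, restore VMX operation if it was active when
 * the SMI arrived. */
static void model_leave_smm(struct nested_vmx_model *n)
{
        if (n->smm.vmxon) {
                n->vmxon = true;
                n->smm.vmxon = false;
        }
}
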