Lines matching refs:loaded_vmcs (all hits below are in KVM's nested VMX code, arch/x86/kvm/vmx/nested.c)
247 struct loaded_vmcs *prev) in vmx_sync_vmcs_host_state()
255 dest = &vmx->loaded_vmcs->host_state; in vmx_sync_vmcs_host_state()
265 static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs) in vmx_switch_vmcs()
268 struct loaded_vmcs *prev; in vmx_switch_vmcs()
271 if (WARN_ON_ONCE(vmx->loaded_vmcs == vmcs)) in vmx_switch_vmcs()
275 prev = vmx->loaded_vmcs; in vmx_switch_vmcs()
276 vmx->loaded_vmcs = vmcs; in vmx_switch_vmcs()
298 if (WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01)) in free_nested()
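The vmx_sync_vmcs_host_state() and vmx_switch_vmcs() hits above form one pattern: guard against switching to the already-current VMCS, remember the previous loaded_vmcs, install the new pointer, then carry the cached host state across so the new VMCS does not start with a stale cache. A minimal user-space sketch of that idiom follows; the struct layouts are simplified stand-ins, and the real function also reloads the hardware VMCS pointer, which is omitted here.

#include <assert.h>
#include <stdio.h>

/* Simplified stand-in types; the real kvm structures carry much more. */
struct vmcs_host_state { unsigned long cr3, cr4; };
struct loaded_vmcs { void *vmcs; struct vmcs_host_state host_state; };
struct vcpu_vmx { struct loaded_vmcs *loaded_vmcs; struct loaded_vmcs vmcs01, vmcs02; };

/* Carry cached host state from the previous VMCS into the new one; the
 * kernel copies individual fields, simplified here to a struct copy. */
static void vmx_sync_vmcs_host_state(struct vcpu_vmx *vmx, struct loaded_vmcs *prev)
{
	struct vmcs_host_state *dest = &vmx->loaded_vmcs->host_state;

	*dest = prev->host_state;
}

/* Switch the vCPU to a different VMCS: guard, save prev, install, sync. */
static void vmx_switch_vmcs(struct vcpu_vmx *vmx, struct loaded_vmcs *vmcs)
{
	struct loaded_vmcs *prev;

	if (vmx->loaded_vmcs == vmcs)	/* WARN_ON_ONCE() in the kernel */
		return;

	prev = vmx->loaded_vmcs;
	vmx->loaded_vmcs = vmcs;
	vmx_sync_vmcs_host_state(vmx, prev);
}

int main(void)
{
	struct vcpu_vmx vmx = { .loaded_vmcs = &vmx.vmcs01 };

	vmx.vmcs01.host_state.cr3 = 0x1000;
	vmx_switch_vmcs(&vmx, &vmx.vmcs02);
	assert(vmx.loaded_vmcs == &vmx.vmcs02);
	assert(vmx.vmcs02.host_state.cr3 == 0x1000);
	printf("host state carried across the switch\n");
	return 0;
}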
1540 vmcs_load(vmx->loaded_vmcs->vmcs); in copy_shadow_to_vmcs12()
1576 vmcs_load(vmx->loaded_vmcs->vmcs); in copy_vmcs12_to_shadow()
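copy_shadow_to_vmcs12() and copy_vmcs12_to_shadow() both end with the vmcs_load(vmx->loaded_vmcs->vmcs) shown above: the shadow VMCS is made current only for the duration of the field copy, after which the vCPU's real VMCS must be restored (handle_vmwrite() at the bottom of this listing does the same restore). A sketch of that load/copy/restore bracket, modeling the hardware's single current-VMCS slot as a plain pointer; vmcs_load()/vmcs_clear() here are stand-ins, not the kernel helpers.

#include <assert.h>
#include <stdio.h>

/* Model: the CPU has exactly one "current" VMCS at a time. */
static void *current_vmcs;

static void vmcs_load(void *vmcs)  { current_vmcs = vmcs; }
static void vmcs_clear(void *vmcs) { if (current_vmcs == vmcs) current_vmcs = 0; }

struct loaded_vmcs { void *vmcs; void *shadow_vmcs; };
struct vcpu_vmx { struct loaded_vmcs *loaded_vmcs; };

/* Temporarily make the shadow VMCS current, copy fields, then restore
 * the vCPU's real VMCS, as in the hits above. */
static void copy_shadow_fields(struct vcpu_vmx *vmx)
{
	void *shadow = vmx->loaded_vmcs->shadow_vmcs;

	vmcs_load(shadow);
	/* ... VMREAD/VMWRITE the synced field list here ... */
	vmcs_clear(shadow);
	vmcs_load(vmx->loaded_vmcs->vmcs);	/* restore */
}

int main(void)
{
	struct loaded_vmcs lv = { .vmcs = (void *)0x1, .shadow_vmcs = (void *)0x2 };
	struct vcpu_vmx vmx = { .loaded_vmcs = &lv };

	vmcs_load(lv.vmcs);
	copy_shadow_fields(&vmx);
	assert(current_vmcs == lv.vmcs);	/* real VMCS is current again */
	printf("loaded_vmcs restored after shadow copy\n");
	return 0;
}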
2239 static void prepare_vmcs02_early(struct vcpu_vmx *vmx, struct loaded_vmcs *vmcs01, in prepare_vmcs02_early()
2397 vmx->loaded_vmcs->nmi_known_unmasked = in prepare_vmcs02_early()
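prepare_vmcs02_early() also refreshes loaded_vmcs->nmi_known_unmasked, a cached answer to "are NMIs unmasked?" that saves a VMREAD of the guest interruptibility field on hot paths. A sketch of that derived-flag caching, assuming the SDM semantics that a set GUEST_INTR_STATE_NMI bit means NMIs are blocked; the bit value matches the kernel's definition, but the surrounding logic is simplified.

#include <assert.h>
#include <stdio.h>

#define GUEST_INTR_STATE_NMI 0x8	/* "blocking by NMI" bit (Intel SDM) */

struct loaded_vmcs { int nmi_known_unmasked; };

/* Recompute the cached flag whenever the interruptibility state is
 * written, so later queries can skip a VMREAD: NMIs are known unmasked
 * iff the NMI-blocking bit is clear. */
static void set_guest_interruptibility(struct loaded_vmcs *lv, unsigned int intr_info)
{
	lv->nmi_known_unmasked = !(intr_info & GUEST_INTR_STATE_NMI);
}

int main(void)
{
	struct loaded_vmcs lv = { 0 };

	set_guest_interruptibility(&lv, 0);
	assert(lv.nmi_known_unmasked);

	set_guest_interruptibility(&lv, GUEST_INTR_STATE_NMI);
	assert(!lv.nmi_known_unmasked);

	printf("nmi_known_unmasked tracks the NMI-blocking bit\n");
	return 0;
}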
3113 if (unlikely(cr3 != vmx->loaded_vmcs->host_state.cr3)) { in nested_vmx_check_vmentry_hw()
3115 vmx->loaded_vmcs->host_state.cr3 = cr3; in nested_vmx_check_vmentry_hw()
3119 if (unlikely(cr4 != vmx->loaded_vmcs->host_state.cr4)) { in nested_vmx_check_vmentry_hw()
3121 vmx->loaded_vmcs->host_state.cr4 = cr4; in nested_vmx_check_vmentry_hw()
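The four nested_vmx_check_vmentry_hw() hits are a write-avoidance cache: HOST_CR3 and HOST_CR4 are only rewritten into the VMCS when the live control register differs from the copy cached in loaded_vmcs->host_state, because a VMWRITE costs far more than a compare. A compilable sketch of the compare-before-write pattern; vmcs_writel() here is a counting stand-in (the real helper takes an encoded field, not a string).

#include <stdio.h>

static unsigned int vmwrite_count;

/* Stand-in for the kernel's vmcs_writel(); here it only counts writes. */
static void vmcs_writel(const char *field, unsigned long val)
{
	(void)field; (void)val;
	vmwrite_count++;
}

struct vmcs_host_state { unsigned long cr3, cr4; };

/* Only touch a VMCS field when the cached copy is stale, mirroring the
 * unlikely(cr3 != host_state.cr3) checks in the hits above. */
static void sync_host_cr3_cr4(struct vmcs_host_state *hs,
			      unsigned long cr3, unsigned long cr4)
{
	if (cr3 != hs->cr3) {
		vmcs_writel("HOST_CR3", cr3);
		hs->cr3 = cr3;
	}
	if (cr4 != hs->cr4) {
		vmcs_writel("HOST_CR4", cr4);
		hs->cr4 = cr4;
	}
}

int main(void)
{
	struct vmcs_host_state hs = { .cr3 = 0x1000, .cr4 = 0x620 };

	sync_host_cr3_cr4(&hs, 0x1000, 0x620);	/* both match: no VMWRITEs */
	sync_host_cr3_cr4(&hs, 0x2000, 0x620);	/* CR3 changed: one VMWRITE */
	printf("vmwrites issued: %u\n", vmwrite_count);	/* prints 1 */
	return 0;
}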
4309 WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01); in copy_vmcs02_to_vmcs12_rare()
4312 vmx->loaded_vmcs = &vmx->nested.vmcs02; in copy_vmcs02_to_vmcs12_rare()
4317 vmx->loaded_vmcs = &vmx->vmcs01; in copy_vmcs02_to_vmcs12_rare()
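copy_vmcs02_to_vmcs12_rare() shows a temporary flip of vmx->loaded_vmcs: rarely-synced fields can only be VMREAD out of vmcs02 while it is current, so the function asserts that vmcs01 is loaded, points loaded_vmcs at vmcs02 for the copy, and restores vmcs01 afterwards. A sketch of that bracket with simplified types; the kernel version also reloads the hardware VMCS around the switch.

#include <assert.h>
#include <stdio.h>

struct loaded_vmcs { const char *name; };
struct vcpu_vmx {
	struct loaded_vmcs *loaded_vmcs;
	struct loaded_vmcs vmcs01;
	struct { struct loaded_vmcs vmcs02; } nested;
};

/* Placeholder for the real field copy; requires vmcs02 to be current. */
static void sync_rare_fields(struct vcpu_vmx *vmx)
{
	assert(vmx->loaded_vmcs == &vmx->nested.vmcs02);
	/* ... VMREAD rarely-used fields out of vmcs02 into vmcs12 ... */
}

/* Assert vmcs01 is current, flip to vmcs02, copy, flip back. */
static void copy_vmcs02_to_vmcs12_rare(struct vcpu_vmx *vmx)
{
	assert(vmx->loaded_vmcs == &vmx->vmcs01);	/* WARN_ON_ONCE() upstream */

	vmx->loaded_vmcs = &vmx->nested.vmcs02;
	sync_rare_fields(vmx);
	vmx->loaded_vmcs = &vmx->vmcs01;
}

int main(void)
{
	struct vcpu_vmx vmx = {
		.loaded_vmcs = &vmx.vmcs01,
		.vmcs01 = { "vmcs01" },
		.nested = { { "vmcs02" } },
	};

	copy_vmcs02_to_vmcs12_rare(&vmx);
	printf("current after copy: %s\n", vmx.loaded_vmcs->name);	/* vmcs01 */
	return 0;
}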
5072 struct loaded_vmcs *loaded_vmcs = vmx->loaded_vmcs; in alloc_shadow_vmcs() local
5082 if (WARN_ON(loaded_vmcs != &vmx->vmcs01 || loaded_vmcs->shadow_vmcs)) in alloc_shadow_vmcs()
5083 return loaded_vmcs->shadow_vmcs; in alloc_shadow_vmcs()
5085 loaded_vmcs->shadow_vmcs = alloc_vmcs(true); in alloc_shadow_vmcs()
5086 if (loaded_vmcs->shadow_vmcs) in alloc_shadow_vmcs()
5087 vmcs_clear(loaded_vmcs->shadow_vmcs); in alloc_shadow_vmcs()
5089 return loaded_vmcs->shadow_vmcs; in alloc_shadow_vmcs()
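alloc_shadow_vmcs() allocates the shadow VMCS lazily and hangs it off vmcs01 only: the guard returns the existing pointer (with a WARN in the kernel) if a shadow already exists or if anything other than vmcs01 is loaded. A user-space sketch with calloc() standing in for alloc_vmcs() and a no-op vmcs_clear().

#include <assert.h>
#include <stdio.h>
#include <stdlib.h>

struct loaded_vmcs { void *vmcs; void *shadow_vmcs; };
struct vcpu_vmx { struct loaded_vmcs *loaded_vmcs; struct loaded_vmcs vmcs01; };

/* Stand-ins for the kernel's allocator and VMCLEAR. */
static void *alloc_vmcs(void)       { return calloc(1, 4096); }
static void vmcs_clear(void *vmcs)  { (void)vmcs; /* VMCLEAR upstream */ }

/* Lazily allocate the shadow VMCS, mirroring the guard in the hits above:
 * only vmcs01 ever carries a shadow, and double allocation is a bug. */
static void *alloc_shadow_vmcs(struct vcpu_vmx *vmx)
{
	struct loaded_vmcs *loaded_vmcs = vmx->loaded_vmcs;

	if (loaded_vmcs != &vmx->vmcs01 || loaded_vmcs->shadow_vmcs)
		return loaded_vmcs->shadow_vmcs;	/* WARN_ON() path upstream */

	loaded_vmcs->shadow_vmcs = alloc_vmcs();
	if (loaded_vmcs->shadow_vmcs)
		vmcs_clear(loaded_vmcs->shadow_vmcs);

	return loaded_vmcs->shadow_vmcs;
}

int main(void)
{
	struct vcpu_vmx vmx = { .loaded_vmcs = &vmx.vmcs01 };
	void *first  = alloc_shadow_vmcs(&vmx);
	void *second = alloc_shadow_vmcs(&vmx);	/* hits the guard, no realloc */

	assert(first && first == second);
	printf("shadow vmcs allocated once\n");
	free(first);
	return 0;
}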
5547 vmcs_load(vmx->loaded_vmcs->vmcs); in handle_vmwrite()