Lines matching refs: loaded_vmcs (identifier cross-reference; the function names place the hits in KVM's nested VMX code, arch/x86/kvm/vmx/nested.c)

239 struct loaded_vmcs *prev) in vmx_sync_vmcs_host_state()
247 dest = &vmx->loaded_vmcs->host_state; in vmx_sync_vmcs_host_state()
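For context, a sketch of vmx_sync_vmcs_host_state() reconstructed from memory of nested.c (field names can differ across kernel versions): after a VMCS switch, the host segment state cached in the outgoing loaded_vmcs is propagated into the incoming one so the HOST_* fields are not left stale.

static void vmx_sync_vmcs_host_state(struct vcpu_vmx *vmx,
                                     struct loaded_vmcs *prev)
{
        struct vmcs_host_state *dest, *src;

        if (unlikely(!vmx->guest_state_loaded))
                return;

        src = &prev->host_state;
        dest = &vmx->loaded_vmcs->host_state;   /* the hit at 247 */

        vmx_set_host_fs_gs(dest, src->fs_sel, src->gs_sel,
                           src->fs_base, src->gs_base);
        dest->ldt_sel = src->ldt_sel;
#ifdef CONFIG_X86_64
        dest->ds_sel = src->ds_sel;
        dest->es_sel = src->es_sel;
#endif
}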
257 static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs) in vmx_switch_vmcs()
260 struct loaded_vmcs *prev; in vmx_switch_vmcs()
263 if (WARN_ON_ONCE(vmx->loaded_vmcs == vmcs)) in vmx_switch_vmcs()
267 prev = vmx->loaded_vmcs; in vmx_switch_vmcs()
268 vmx->loaded_vmcs = vmcs; in vmx_switch_vmcs()
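The switch itself: the WARN at 263 rejects a redundant switch, then the loaded_vmcs pointer is swapped under get_cpu()/put_cpu() so the new VMCS is loaded on a stable physical CPU and the cached host state follows it. A sketch, reconstructed from memory:

static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs)
{
        struct vcpu_vmx *vmx = to_vmx(vcpu);
        struct loaded_vmcs *prev;
        int cpu;

        if (WARN_ON_ONCE(vmx->loaded_vmcs == vmcs))
                return;

        cpu = get_cpu();
        prev = vmx->loaded_vmcs;
        vmx->loaded_vmcs = vmcs;
        vmx_vcpu_load_vmcs(vcpu, cpu, prev);
        vmx_sync_vmcs_host_state(vmx, prev);
        put_cpu();

        /* Register caches are also invalidated here; elided. */
}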
290 if (WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01)) in free_nested()
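This hit is an invariant check: nested state must be torn down with vmcs01 as the current VMCS. If memory serves, current kernels recover from a violation by forcing the switch rather than only warning:

        if (WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01))
                vmx_switch_vmcs(vcpu, &vmx->vmcs01);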
1517 vmcs_load(vmx->loaded_vmcs->vmcs); in copy_shadow_to_vmcs12()
1553 vmcs_load(vmx->loaded_vmcs->vmcs); in copy_vmcs12_to_shadow()
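The hits at 1517 and 1553 are the same restore step in both copy directions: the shadow VMCS is temporarily made current with vmcs_load() so shadowed fields can be accessed with plain VMREAD/VMWRITE, then vmx->loaded_vmcs->vmcs is reloaded. A sketch of the read direction, from memory:

static void copy_shadow_to_vmcs12(struct vcpu_vmx *vmx)
{
        struct vmcs *shadow_vmcs = vmx->vmcs01.shadow_vmcs;
        struct vmcs12 *vmcs12 = get_vmcs12(&vmx->vcpu);

        preempt_disable();

        vmcs_load(shadow_vmcs);

        /* VMREAD every shadowed field and store it into vmcs12. */

        vmcs_clear(shadow_vmcs);
        vmcs_load(vmx->loaded_vmcs->vmcs);      /* the hit at 1517 */

        preempt_enable();
}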
2207 static void prepare_vmcs02_early(struct vcpu_vmx *vmx, struct loaded_vmcs *vmcs01, in prepare_vmcs02_early()
2366 vmx->loaded_vmcs->nmi_known_unmasked = in prepare_vmcs02_early()
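Two distinct uses: prepare_vmcs02_early() takes vmcs01 as an explicit parameter (2207) so it can seed vmcs02's controls from vmcs01's cached control shadows, and the hit at 2366 refreshes the NMI-blocking cache when no event is being injected for this entry. A sketch of the latter, with surrounding logic from memory (it varies by version):

        if (vmx->nested.nested_run_pending &&
            (vmcs12->vm_entry_intr_info_field & INTR_INFO_VALID_MASK)) {
                /* Forward L1's pending event injection into vmcs02. */
                vmcs_write32(VM_ENTRY_INTR_INFO_FIELD,
                             vmcs12->vm_entry_intr_info_field);
        } else {
                vmcs_write32(VM_ENTRY_INTR_INFO_FIELD, 0);
                /* No injection: NMI blocking comes straight from vmcs12. */
                vmx->loaded_vmcs->nmi_known_unmasked =
                        !(vmcs12->guest_interruptibility_info &
                          GUEST_INTR_STATE_NMI);
        }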
3085 if (unlikely(cr3 != vmx->loaded_vmcs->host_state.cr3)) { in nested_vmx_check_vmentry_hw()
3087 vmx->loaded_vmcs->host_state.cr3 = cr3; in nested_vmx_check_vmentry_hw()
3091 if (unlikely(cr4 != vmx->loaded_vmcs->host_state.cr4)) { in nested_vmx_check_vmentry_hw()
3093 vmx->loaded_vmcs->host_state.cr4 = cr4; in nested_vmx_check_vmentry_hw()
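These four hits are write-caching of the host CR3/CR4 fields in nested_vmx_check_vmentry_hw(), which performs an early VM-entry solely to run the hardware consistency checks; caching the last value written in loaded_vmcs->host_state skips redundant VMWRITEs on the hot path. Sketch from memory:

        unsigned long cr3, cr4;

        cr3 = __get_current_cr3_fast();
        if (unlikely(cr3 != vmx->loaded_vmcs->host_state.cr3)) {
                vmcs_writel(HOST_CR3, cr3);
                vmx->loaded_vmcs->host_state.cr3 = cr3;
        }

        cr4 = cr4_read_shadow();
        if (unlikely(cr4 != vmx->loaded_vmcs->host_state.cr4)) {
                vmcs_writel(HOST_CR4, cr4);
                vmx->loaded_vmcs->host_state.cr4 = cr4;
        }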
4270 WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01); in copy_vmcs02_to_vmcs12_rare()
4273 vmx->loaded_vmcs = &vmx->nested.vmcs02; in copy_vmcs02_to_vmcs12_rare()
4278 vmx->loaded_vmcs = &vmx->vmcs01; in copy_vmcs02_to_vmcs12_rare()
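Here loaded_vmcs is flipped to vmcs02 just long enough to VMREAD the rarely-accessed fields back into vmcs12, then flipped back to vmcs01. A sketch, assuming the vmx_vcpu_load_vmcs() buddy-parameter form used by recent kernels:

static void copy_vmcs02_to_vmcs12_rare(struct kvm_vcpu *vcpu,
                                       struct vmcs12 *vmcs12)
{
        struct vcpu_vmx *vmx = to_vmx(vcpu);
        int cpu;

        if (!vmx->nested.need_sync_vmcs02_to_vmcs12_rare)
                return;

        vmx->nested.need_sync_vmcs02_to_vmcs12_rare = false;

        WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01);     /* 4270 */

        cpu = get_cpu();
        vmx->loaded_vmcs = &vmx->nested.vmcs02;             /* 4273 */
        vmx_vcpu_load_vmcs(vcpu, cpu, &vmx->vmcs01);

        sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12);

        vmx->loaded_vmcs = &vmx->vmcs01;                    /* 4278 */
        vmx_vcpu_load_vmcs(vcpu, cpu, &vmx->nested.vmcs02);
        put_cpu();
}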
5022 struct loaded_vmcs *loaded_vmcs = vmx->loaded_vmcs; in alloc_shadow_vmcs() local
5032 if (WARN_ON(loaded_vmcs != &vmx->vmcs01 || loaded_vmcs->shadow_vmcs)) in alloc_shadow_vmcs()
5033 return loaded_vmcs->shadow_vmcs; in alloc_shadow_vmcs()
5035 loaded_vmcs->shadow_vmcs = alloc_vmcs(true); in alloc_shadow_vmcs()
5036 if (loaded_vmcs->shadow_vmcs) in alloc_shadow_vmcs()
5037 vmcs_clear(loaded_vmcs->shadow_vmcs); in alloc_shadow_vmcs()
5039 return loaded_vmcs->shadow_vmcs; in alloc_shadow_vmcs()
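The whole function, reconstructed from memory; the WARN encodes the rule that a shadow VMCS is allocated only for vmcs01 (at VMXON) and only once:

static struct vmcs *alloc_shadow_vmcs(struct kvm_vcpu *vcpu)
{
        struct vcpu_vmx *vmx = to_vmx(vcpu);
        struct loaded_vmcs *loaded_vmcs = vmx->loaded_vmcs;

        /*
         * The shadow VMCS belongs to vmcs01 and is allocated on VMXON;
         * reaching this with vmcs02 loaded, or with a shadow VMCS
         * already present, would be a KVM bug.
         */
        if (WARN_ON(loaded_vmcs != &vmx->vmcs01 || loaded_vmcs->shadow_vmcs))
                return loaded_vmcs->shadow_vmcs;

        loaded_vmcs->shadow_vmcs = alloc_vmcs(true);
        if (loaded_vmcs->shadow_vmcs)
                vmcs_clear(loaded_vmcs->shadow_vmcs);

        return loaded_vmcs->shadow_vmcs;
}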
5467 vmcs_load(vmx->loaded_vmcs->vmcs); in handle_vmwrite()
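The final hit is the same load/clear/restore dance as in the copy helpers: when L1 VMWRITEs a shadowed read/write field while L2 is not running, KVM mirrors the write straight into vmcs01's shadow VMCS so a later VMREAD through the shadow sees it. Sketch from memory:

        preempt_disable();
        vmcs_load(vmx->vmcs01.shadow_vmcs);

        __vmcs_writel(field, value);

        vmcs_clear(vmx->vmcs01.shadow_vmcs);
        vmcs_load(vmx->loaded_vmcs->vmcs);      /* the hit at 5467 */

        preempt_enable();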