Lines matching refs: loaded_vmcs
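(Identifier cross-reference: each match gives the source line number, the matching line, and the enclosing function; the trailing tags "struct", "member", "argument", and "local" mark the kind of occurrence. The line numbers are consistent with KVM's monolithic arch/x86/kvm/vmx.c of the v4.19 era, though the exact file and version are an assumption.)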

393 struct loaded_vmcs {  struct
841 struct loaded_vmcs vmcs02;
985 struct loaded_vmcs vmcs01;
986 struct loaded_vmcs *loaded_vmcs; member
987 struct loaded_vmcs *loaded_cpu_state;
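
Pieced together from the member accesses throughout this listing, the structure looks roughly like the sketch below. Field order and the exact types are assumptions; every field name appears in the matches.

struct loaded_vmcs {
	struct vmcs *vmcs;		/* hardware VMCS page (VMPTRLD target) */
	struct vmcs *shadow_vmcs;	/* nested shadow VMCS, or NULL */
	int cpu;			/* CPU this VMCS is loaded on, -1 if none */
	bool launched;			/* VMLAUNCH done; next entry uses VMRESUME */
	bool nmi_known_unmasked;	/* cached NMI-mask state */
	bool hv_timer_armed;		/* VMX preemption timer armed */
	int soft_vnmi_blocked;		/* software NMI blocking (no-vNMI CPUs) */
	ktime_t entry_time;		/* entry timestamp for soft-vNMI accounting */
	s64 vnmi_blocked_time;		/* ns spent soft-blocked */
	unsigned long *msr_bitmap;	/* per-VMCS MSR intercept bitmap */
	struct list_head loaded_vmcss_on_cpu_link; /* per-CPU list linkage */
	struct vmcs_host_state host_state;	/* cached host CR3/CR4 etc. */
};

The nested state embeds vmcs02 (line 841), the vCPU embeds vmcs01 (line 985), and loaded_vmcs (line 986) points at whichever of the two is active.
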
2131 static inline void loaded_vmcs_init(struct loaded_vmcs *loaded_vmcs) in loaded_vmcs_init() argument
2133 vmcs_clear(loaded_vmcs->vmcs); in loaded_vmcs_init()
2134 if (loaded_vmcs->shadow_vmcs && loaded_vmcs->launched) in loaded_vmcs_init()
2135 vmcs_clear(loaded_vmcs->shadow_vmcs); in loaded_vmcs_init()
2136 loaded_vmcs->cpu = -1; in loaded_vmcs_init()
2137 loaded_vmcs->launched = 0; in loaded_vmcs_init()
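
The matches at 2131-2137 give the init helper almost in full; assembled into one block (only the braces are filled in):

static inline void loaded_vmcs_init(struct loaded_vmcs *loaded_vmcs)
{
	vmcs_clear(loaded_vmcs->vmcs);
	/* A shadow VMCS needs VMCLEAR only once it has been launched. */
	if (loaded_vmcs->shadow_vmcs && loaded_vmcs->launched)
		vmcs_clear(loaded_vmcs->shadow_vmcs);
	loaded_vmcs->cpu = -1;	/* not resident on any CPU */
	loaded_vmcs->launched = 0;
}
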
2182 struct loaded_vmcs *v; in crash_vmclear_local_loaded_vmcss()
2198 struct loaded_vmcs *loaded_vmcs = arg; in __loaded_vmcs_clear() local
2201 if (loaded_vmcs->cpu != cpu) in __loaded_vmcs_clear()
2203 if (per_cpu(current_vmcs, cpu) == loaded_vmcs->vmcs) in __loaded_vmcs_clear()
2206 list_del(&loaded_vmcs->loaded_vmcss_on_cpu_link); in __loaded_vmcs_clear()
2216 loaded_vmcs_init(loaded_vmcs); in __loaded_vmcs_clear()
2220 static void loaded_vmcs_clear(struct loaded_vmcs *loaded_vmcs) in loaded_vmcs_clear() argument
2222 int cpu = loaded_vmcs->cpu; in loaded_vmcs_clear()
2226 __loaded_vmcs_clear, loaded_vmcs, 1); in loaded_vmcs_clear()
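
The clear path bounces an IPI to whichever CPU currently holds the VMCS so the VMCLEAR executes locally. A sketch assembled from the matches at 2198-2226; the real function also carries crash-kexec fencing and a write barrier, omitted here:

static void __loaded_vmcs_clear(void *arg)
{
	struct loaded_vmcs *loaded_vmcs = arg;
	int cpu = raw_smp_processor_id();

	/* A racing migration or CPU offline may have beaten us here. */
	if (loaded_vmcs->cpu != cpu)
		return;
	if (per_cpu(current_vmcs, cpu) == loaded_vmcs->vmcs)
		per_cpu(current_vmcs, cpu) = NULL;

	list_del(&loaded_vmcs->loaded_vmcss_on_cpu_link);

	loaded_vmcs_init(loaded_vmcs);	/* VMCLEAR, cpu = -1, launched = 0 */
}

static void loaded_vmcs_clear(struct loaded_vmcs *loaded_vmcs)
{
	int cpu = loaded_vmcs->cpu;

	if (cpu != -1)	/* assumed guard: cpu == -1 means not loaded anywhere */
		smp_call_function_single(cpu,
			 __loaded_vmcs_clear, loaded_vmcs, 1);
}
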
2611 msr_bitmap = to_vmx(vcpu)->loaded_vmcs->msr_bitmap; in msr_write_intercepted()
2879 vmx->loaded_cpu_state = vmx->loaded_vmcs; in vmx_prepare_switch_to_guest()
2950 WARN_ON_ONCE(vmx->loaded_cpu_state != vmx->loaded_vmcs); in vmx_prepare_switch_to_host()
3061 bool already_loaded = vmx->loaded_vmcs->cpu == cpu; in vmx_vcpu_load()
3064 loaded_vmcs_clear(vmx->loaded_vmcs); in vmx_vcpu_load()
3075 list_add(&vmx->loaded_vmcs->loaded_vmcss_on_cpu_link, in vmx_vcpu_load()
3081 if (per_cpu(current_vmcs, cpu) != vmx->loaded_vmcs->vmcs) { in vmx_vcpu_load()
3082 per_cpu(current_vmcs, cpu) = vmx->loaded_vmcs->vmcs; in vmx_vcpu_load()
3083 vmcs_load(vmx->loaded_vmcs->vmcs); in vmx_vcpu_load()
3112 vmx->loaded_vmcs->cpu = cpu; in vmx_vcpu_load()
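
vmx_vcpu_load() ties these pieces together when a vCPU lands on a CPU: VMCLEAR wherever the VMCS last lived, relink it onto the new CPU's list, and VMPTRLD it if it is not already current. A sketch of the control flow implied by the matches at 3061-3112; IRQ fencing and the per-CPU descriptor-table refresh are omitted:

static void vmx_vcpu_load(struct kvm_vcpu *vcpu, int cpu)
{
	struct vcpu_vmx *vmx = to_vmx(vcpu);
	bool already_loaded = vmx->loaded_vmcs->cpu == cpu;

	if (!already_loaded) {
		/* Flush the VMCS from the CPU it was last resident on... */
		loaded_vmcs_clear(vmx->loaded_vmcs);
		/* ...and track it on the new CPU's list for offline/crash. */
		list_add(&vmx->loaded_vmcs->loaded_vmcss_on_cpu_link,
			 &per_cpu(loaded_vmcss_on_cpu, cpu));
	}

	if (per_cpu(current_vmcs, cpu) != vmx->loaded_vmcs->vmcs) {
		per_cpu(current_vmcs, cpu) = vmx->loaded_vmcs->vmcs;
		vmcs_load(vmx->loaded_vmcs->vmcs);	/* VMPTRLD */
	}

	vmx->loaded_vmcs->cpu = cpu;
}
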
4458 struct loaded_vmcs *v, *n; in vmclear_local_loaded_vmcss()
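
The per-CPU list exists precisely so the offline and crash paths (4458 and 2182) can VMCLEAR everything still resident locally; the `*v, *n` pair at 4458 implies the usual safe-iteration pattern:

static void vmclear_local_loaded_vmcss(void)
{
	int cpu = raw_smp_processor_id();
	struct loaded_vmcs *v, *n;

	/* __loaded_vmcs_clear() unlinks each entry, hence the _safe walk. */
	list_for_each_entry_safe(v, n, &per_cpu(loaded_vmcss_on_cpu, cpu),
				 loaded_vmcss_on_cpu_link)
		__loaded_vmcs_clear(v);
}
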
4746 static void free_loaded_vmcs(struct loaded_vmcs *loaded_vmcs) in free_loaded_vmcs() argument
4748 if (!loaded_vmcs->vmcs) in free_loaded_vmcs()
4750 loaded_vmcs_clear(loaded_vmcs); in free_loaded_vmcs()
4751 free_vmcs(loaded_vmcs->vmcs); in free_loaded_vmcs()
4752 loaded_vmcs->vmcs = NULL; in free_loaded_vmcs()
4753 if (loaded_vmcs->msr_bitmap) in free_loaded_vmcs()
4754 free_page((unsigned long)loaded_vmcs->msr_bitmap); in free_loaded_vmcs()
4755 WARN_ON(loaded_vmcs->shadow_vmcs != NULL); in free_loaded_vmcs()
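
Teardown, assembled from the matches at 4746-4755; the early return is implied by 4748:

static void free_loaded_vmcs(struct loaded_vmcs *loaded_vmcs)
{
	if (!loaded_vmcs->vmcs)
		return;
	loaded_vmcs_clear(loaded_vmcs);	/* VMCLEAR before freeing the page */
	free_vmcs(loaded_vmcs->vmcs);
	loaded_vmcs->vmcs = NULL;
	if (loaded_vmcs->msr_bitmap)
		free_page((unsigned long)loaded_vmcs->msr_bitmap);
	/* The nested teardown is expected to free the shadow VMCS first. */
	WARN_ON(loaded_vmcs->shadow_vmcs != NULL);
}
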
4763 static int alloc_loaded_vmcs(struct loaded_vmcs *loaded_vmcs) in alloc_loaded_vmcs() argument
4765 loaded_vmcs->vmcs = alloc_vmcs(false); in alloc_loaded_vmcs()
4766 if (!loaded_vmcs->vmcs) in alloc_loaded_vmcs()
4769 loaded_vmcs->shadow_vmcs = NULL; in alloc_loaded_vmcs()
4770 loaded_vmcs_init(loaded_vmcs); in alloc_loaded_vmcs()
4773 loaded_vmcs->msr_bitmap = (unsigned long *)__get_free_page(GFP_KERNEL); in alloc_loaded_vmcs()
4774 if (!loaded_vmcs->msr_bitmap) in alloc_loaded_vmcs()
4776 memset(loaded_vmcs->msr_bitmap, 0xff, PAGE_SIZE); in alloc_loaded_vmcs()
4782 (struct hv_enlightened_vmcs *)loaded_vmcs->vmcs; in alloc_loaded_vmcs()
4788 memset(&loaded_vmcs->host_state, 0, sizeof(struct vmcs_host_state)); in alloc_loaded_vmcs()
4793 free_loaded_vmcs(loaded_vmcs); in alloc_loaded_vmcs()
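
Allocation, reconstructed from the matches at 4763-4793. The MSR-bitmap guard, the error label, and the return values are assumptions; the eVMCS cast at 4782 is reduced to a comment:

static int alloc_loaded_vmcs(struct loaded_vmcs *loaded_vmcs)
{
	loaded_vmcs->vmcs = alloc_vmcs(false);
	if (!loaded_vmcs->vmcs)
		return -ENOMEM;

	loaded_vmcs->shadow_vmcs = NULL;
	loaded_vmcs_init(loaded_vmcs);

	if (cpu_has_vmx_msr_bitmap()) {	/* assumed guard */
		loaded_vmcs->msr_bitmap =
			(unsigned long *)__get_free_page(GFP_KERNEL);
		if (!loaded_vmcs->msr_bitmap)
			goto out_vmcs;
		/* All ones: intercept every MSR until told otherwise. */
		memset(loaded_vmcs->msr_bitmap, 0xff, PAGE_SIZE);
		/* Line 4782 additionally flags the bitmap inside the
		 * Hyper-V enlightened VMCS when running on eVMCS. */
	}

	memset(&loaded_vmcs->host_state, 0, sizeof(struct vmcs_host_state));
	return 0;

out_vmcs:
	free_loaded_vmcs(loaded_vmcs);
	return -ENOMEM;
}
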
6304 vmx->loaded_vmcs->host_state.cr3 = cr3; in vmx_set_constant_host_state()
6309 vmx->loaded_vmcs->host_state.cr4 = cr4; in vmx_set_constant_host_state()
6859 vmx->loaded_vmcs->soft_vnmi_blocked = 1; in vmx_inject_nmi()
6860 vmx->loaded_vmcs->vnmi_blocked_time = 0; in vmx_inject_nmi()
6864 vmx->loaded_vmcs->nmi_known_unmasked = false; in vmx_inject_nmi()
6884 return vmx->loaded_vmcs->soft_vnmi_blocked; in vmx_get_nmi_mask()
6885 if (vmx->loaded_vmcs->nmi_known_unmasked) in vmx_get_nmi_mask()
6888 vmx->loaded_vmcs->nmi_known_unmasked = !masked; in vmx_get_nmi_mask()
6897 if (vmx->loaded_vmcs->soft_vnmi_blocked != masked) { in vmx_set_nmi_mask()
6898 vmx->loaded_vmcs->soft_vnmi_blocked = masked; in vmx_set_nmi_mask()
6899 vmx->loaded_vmcs->vnmi_blocked_time = 0; in vmx_set_nmi_mask()
6902 vmx->loaded_vmcs->nmi_known_unmasked = !masked; in vmx_set_nmi_mask()
6918 to_vmx(vcpu)->loaded_vmcs->soft_vnmi_blocked) in vmx_nmi_allowed()
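
On CPUs without virtual-NMI support the blocking state lives purely in software (soft_vnmi_blocked); with vNMI it is read from, and cached against, the guest interruptibility field. A sketch of the mask accessors consistent with the matches at 6884-6902, assuming the usual enable_vnmi toggle and GUEST_INTERRUPTIBILITY_INFO encoding:

static bool vmx_get_nmi_mask(struct kvm_vcpu *vcpu)
{
	struct vcpu_vmx *vmx = to_vmx(vcpu);
	bool masked;

	if (!enable_vnmi)
		return vmx->loaded_vmcs->soft_vnmi_blocked;
	if (vmx->loaded_vmcs->nmi_known_unmasked)
		return false;	/* cached: skip the vmcs_read */
	masked = vmcs_read32(GUEST_INTERRUPTIBILITY_INFO) &
		 GUEST_INTR_STATE_NMI;
	vmx->loaded_vmcs->nmi_known_unmasked = !masked;
	return masked;
}

static void vmx_set_nmi_mask(struct kvm_vcpu *vcpu, bool masked)
{
	struct vcpu_vmx *vmx = to_vmx(vcpu);

	if (!enable_vnmi) {
		if (vmx->loaded_vmcs->soft_vnmi_blocked != masked) {
			vmx->loaded_vmcs->soft_vnmi_blocked = masked;
			vmx->loaded_vmcs->vnmi_blocked_time = 0;
		}
	} else {
		vmx->loaded_vmcs->nmi_known_unmasked = !masked;
		if (masked)
			vmcs_set_bits(GUEST_INTERRUPTIBILITY_INFO,
				      GUEST_INTR_STATE_NMI);
		else
			vmcs_clear_bits(GUEST_INTERRUPTIBILITY_INFO,
					GUEST_INTR_STATE_NMI);
	}
}
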
8251 struct loaded_vmcs *loaded_vmcs = vmx->loaded_vmcs; in alloc_shadow_vmcs() local
8259 WARN_ON(loaded_vmcs == &vmx->vmcs01 && loaded_vmcs->shadow_vmcs); in alloc_shadow_vmcs()
8261 if (!loaded_vmcs->shadow_vmcs) { in alloc_shadow_vmcs()
8262 loaded_vmcs->shadow_vmcs = alloc_vmcs(true); in alloc_shadow_vmcs()
8263 if (loaded_vmcs->shadow_vmcs) in alloc_shadow_vmcs()
8264 vmcs_clear(loaded_vmcs->shadow_vmcs); in alloc_shadow_vmcs()
8266 return loaded_vmcs->shadow_vmcs; in alloc_shadow_vmcs()
8652 vmcs_load(vmx->loaded_vmcs->vmcs); in copy_shadow_to_vmcs12()
8683 vmcs_load(vmx->loaded_vmcs->vmcs); in copy_vmcs12_to_shadow()
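
The shadow VMCS is allocated lazily per loaded_vmcs; assembled from 8251-8266 (the WARN_ON documents that vmcs01's shadow is managed on VMXON/VMXOFF instead). Note alloc_vmcs(true) versus the alloc_vmcs(false) at 4765: that the flag selects a shadow allocation is an assumption based on the two call sites.

static struct vmcs *alloc_shadow_vmcs(struct kvm_vcpu *vcpu)
{
	struct vcpu_vmx *vmx = to_vmx(vcpu);
	struct loaded_vmcs *loaded_vmcs = vmx->loaded_vmcs;

	/* vmcs01's shadow is allocated on VMXON; finding one here is a bug. */
	WARN_ON(loaded_vmcs == &vmx->vmcs01 && loaded_vmcs->shadow_vmcs);

	if (!loaded_vmcs->shadow_vmcs) {
		loaded_vmcs->shadow_vmcs = alloc_vmcs(true);
		if (loaded_vmcs->shadow_vmcs)
			vmcs_clear(loaded_vmcs->shadow_vmcs);
	}
	return loaded_vmcs->shadow_vmcs;
}

The copy helpers at 8652 and 8683 point at the matching discipline: after copying fields to or from the shadow, vmcs_load(vmx->loaded_vmcs->vmcs) restores the ordinary VMCS as current.
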
10108 vmx->loaded_vmcs->soft_vnmi_blocked)) { in vmx_handle_exit()
10110 vmx->loaded_vmcs->soft_vnmi_blocked = 0; in vmx_handle_exit()
10111 } else if (vmx->loaded_vmcs->vnmi_blocked_time > 1000000000LL && in vmx_handle_exit()
10122 vmx->loaded_vmcs->soft_vnmi_blocked = 0; in vmx_handle_exit()
10498 if (vmx->loaded_vmcs->nmi_known_unmasked) in vmx_recover_nmi_blocking()
10522 vmx->loaded_vmcs->nmi_known_unmasked = in vmx_recover_nmi_blocking()
10525 } else if (unlikely(vmx->loaded_vmcs->soft_vnmi_blocked)) in vmx_recover_nmi_blocking()
10526 vmx->loaded_vmcs->vnmi_blocked_time += in vmx_recover_nmi_blocking()
10528 vmx->loaded_vmcs->entry_time)); in vmx_recover_nmi_blocking()
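
Soft-vNMI bookkeeping closes the loop across exits: vmx_recover_nmi_blocking() accumulates blocked time (10525-10528) and vmx_handle_exit() breaks a stuck window after one second (10108-10122). In-function fragments sketching both, assuming vmx_interrupt_allowed() and vcpu->arch.nmi_pending as the companion checks; the real timeout path also prints a warning:

	/* vmx_recover_nmi_blocking(), no-vNMI path: account blocked time. */
	if (unlikely(vmx->loaded_vmcs->soft_vnmi_blocked))
		vmx->loaded_vmcs->vnmi_blocked_time +=
			ktime_to_ns(ktime_sub(ktime_get(),
					      vmx->loaded_vmcs->entry_time));

	/* vmx_handle_exit(): watchdog against a blocked window whose end
	 * we can no longer detect. */
	if (unlikely(!enable_vnmi &&
		     vmx->loaded_vmcs->soft_vnmi_blocked)) {
		if (vmx_interrupt_allowed(vcpu)) {
			vmx->loaded_vmcs->soft_vnmi_blocked = 0;
		} else if (vmx->loaded_vmcs->vnmi_blocked_time > 1000000000LL &&
			   vcpu->arch.nmi_pending) {
			/* One second of futile waiting: force-unblock. */
			vmx->loaded_vmcs->soft_vnmi_blocked = 0;
		}
	}
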
10623 if (!vmx->loaded_vmcs->hv_timer_armed) in vmx_arm_hv_timer()
10626 vmx->loaded_vmcs->hv_timer_armed = true; in vmx_arm_hv_timer()
10653 if (vmx->loaded_vmcs->hv_timer_armed) in vmx_update_hv_timer()
10656 vmx->loaded_vmcs->hv_timer_armed = false; in vmx_update_hv_timer()
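
hv_timer_armed caches whether the VMX preemption timer's pin-based control is set, so the arm and disarm paths at 10623-10656 touch the VMCS only on a state change. In-function fragments assuming the standard control names; `val` stands in for the computed deadline:

	/* vmx_arm_hv_timer(): set the pin-based control at most once. */
	vmcs_write32(VMX_PREEMPTION_TIMER_VALUE, val);
	if (!vmx->loaded_vmcs->hv_timer_armed)
		vmcs_set_bits(PIN_BASED_VM_EXEC_CONTROL,
			      PIN_BASED_VMX_PREEMPTION_TIMER);
	vmx->loaded_vmcs->hv_timer_armed = true;

	/* vmx_update_hv_timer(), disarm path: clear it at most once. */
	if (vmx->loaded_vmcs->hv_timer_armed)
		vmcs_clear_bits(PIN_BASED_VM_EXEC_CONTROL,
				PIN_BASED_VMX_PREEMPTION_TIMER);
	vmx->loaded_vmcs->hv_timer_armed = false;
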
10666 vmx->loaded_vmcs->soft_vnmi_blocked)) in vmx_vcpu_run()
10667 vmx->loaded_vmcs->entry_time = ktime_get(); in vmx_vcpu_run()
10690 if (unlikely(cr3 != vmx->loaded_vmcs->host_state.cr3)) { in vmx_vcpu_run()
10692 vmx->loaded_vmcs->host_state.cr3 = cr3; in vmx_vcpu_run()
10696 if (unlikely(cr4 != vmx->loaded_vmcs->host_state.cr4)) { in vmx_vcpu_run()
10698 vmx->loaded_vmcs->host_state.cr4 = cr4; in vmx_vcpu_run()
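
host_state.cr3/cr4 exist to make HOST_CR3/HOST_CR4 writes conditional on the hot path: vmx_set_constant_host_state() seeds the cache (6304/6309) and vmx_vcpu_run() rewrites the VMCS field only on a mismatch (10690-10698). A sketch of the run-side check, assuming the usual __get_current_cr3_fast()/cr4_read_shadow() helpers feed the comparison:

	cr3 = __get_current_cr3_fast();
	if (unlikely(cr3 != vmx->loaded_vmcs->host_state.cr3)) {
		vmcs_writel(HOST_CR3, cr3);	/* PGD can differ per task */
		vmx->loaded_vmcs->host_state.cr3 = cr3;
	}

	cr4 = cr4_read_shadow();
	if (unlikely(cr4 != vmx->loaded_vmcs->host_state.cr4)) {
		vmcs_writel(HOST_CR4, cr4);
		vmx->loaded_vmcs->host_state.cr4 = cr4;
	}
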
10726 vmx->__launched = vmx->loaded_vmcs->launched; in vmx_vcpu_run()
10928 vmx->loaded_vmcs->launched = 1; in vmx_vcpu_run()
10948 static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs) in vmx_switch_vmcs()
10953 if (vmx->loaded_vmcs == vmcs) in vmx_switch_vmcs()
10958 vmx->loaded_vmcs = vmcs; in vmx_switch_vmcs()
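
vmx_switch_vmcs() flips a vCPU between vmcs01 and vmcs02 on nested entry and exit. The early-out and the pointer swap are shown at 10953 and 10958; the put/load bracketing around the swap is an assumption, reusing the load path sketched earlier:

static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs)
{
	struct vcpu_vmx *vmx = to_vmx(vcpu);
	int cpu;

	if (vmx->loaded_vmcs == vmcs)
		return;

	cpu = get_cpu();
	vmx_vcpu_put(vcpu);	/* assumed: save state tied to the old VMCS */
	vmx->loaded_vmcs = vmcs;
	vmx_vcpu_load(vcpu, cpu); /* VMPTRLD the new one (see vmx_vcpu_load) */
	put_cpu();
}
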
10986 free_loaded_vmcs(vmx->loaded_vmcs); in vmx_free_vcpu()
11042 vmx->loaded_vmcs = &vmx->vmcs01; in vmx_create_vcpu()
11080 free_loaded_vmcs(vmx->loaded_vmcs); in vmx_create_vcpu()
12141 vmx->loaded_vmcs->nmi_known_unmasked = in prepare_vmcs02()
12153 vmx->loaded_vmcs->hv_timer_armed = false; in prepare_vmcs02()
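
Finally, prepare_vmcs02() shows why these caches live in loaded_vmcs rather than in the vCPU: switching to vmcs02 invalidates them. In-function fragments for 12141 and 12153; the vmcs12 field name is an assumption consistent with the interruptibility encoding used above:

	/* Re-derive the NMI-mask cache from vmcs12's interruptibility... */
	vmx->loaded_vmcs->nmi_known_unmasked =
		!(vmcs12->guest_interruptibility_info & GUEST_INTR_STATE_NMI);

	/* ...and the preemption timer starts unarmed in vmcs02. */
	vmx->loaded_vmcs->hv_timer_armed = false;
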