
Searched refs:to_vmx (Results 1 – 5 of 5) sorted by relevance

/Linux-v5.4/arch/x86/kvm/vmx/
nested.h
39 return to_vmx(vcpu)->nested.cached_vmcs12; in get_vmcs12()
44 return to_vmx(vcpu)->nested.cached_shadow_vmcs12; in get_shadow_vmcs12()
49 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_has_valid_vmcs12()
118 return vmx_misc_cr3_count(to_vmx(vcpu)->nested.msrs.misc_low); in nested_cpu_vmx_misc_cr3_count()
128 return to_vmx(vcpu)->nested.msrs.misc_low & in nested_cpu_has_vmwrite_any_field()
134 return to_vmx(vcpu)->nested.msrs.misc_low & VMX_MISC_ZERO_LEN_INS; in nested_cpu_has_zero_length_injection()
139 return to_vmx(vcpu)->nested.msrs.procbased_ctls_high & in nested_cpu_supports_monitor_trap_flag()
145 return to_vmx(vcpu)->nested.msrs.secondary_ctls_high & in nested_cpu_has_vmx_shadow_vmcs()
261 u64 fixed0 = to_vmx(vcpu)->nested.msrs.cr0_fixed0; in nested_guest_cr0_valid()
262 u64 fixed1 = to_vmx(vcpu)->nested.msrs.cr0_fixed1; in nested_guest_cr0_valid()
[all …]
vmx.c
468 tmp_eptp = to_vmx(vcpu)->ept_pointer; in check_ept_pointer_match()
469 } else if (tmp_eptp != to_vmx(vcpu)->ept_pointer) { in check_ept_pointer_match()
491 u64 ept_pointer = to_vmx(vcpu)->ept_pointer; in __hv_remote_flush_tlb_with_range()
519 if (VALID_PAGE(to_vmx(vcpu)->ept_pointer)) in hv_remote_flush_tlb_with_range()
551 evmcs = (struct hv_enlightened_vmcs *)to_vmx(vcpu)->loaded_vmcs->vmcs; in hv_enable_direct_tlbflush()
792 if (to_vmx(vcpu)->rmode.vm86_active) in update_exception_bitmap()
819 msr_bitmap = to_vmx(vcpu)->loaded_vmcs->msr_bitmap; in msr_write_intercepted()
1132 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_prepare_switch_to_guest()
1314 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_vcpu_load_vmcs()
1381 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_vcpu_load()
[all …]
nested.c
173 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_vmx_failValid()
220 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_release_evmcs()
236 struct vcpu_vmx *vmx = to_vmx(vcpu); in free_nested()
295 struct vcpu_vmx *vmx = to_vmx(vcpu); in vmx_switch_vmcs()
320 vmx_switch_vmcs(vcpu, &to_vmx(vcpu)->vmcs01); in nested_vmx_free_vcpu()
329 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_ept_inject_page_fault()
352 to_vmx(vcpu)->nested.msrs.ept_caps & in nested_ept_init_mmu_context()
429 !to_vmx(vcpu)->nested.nested_run_pending) { in vmx_inject_page_fault_nested()
493 msr_bitmap = to_vmx(vcpu)->vmcs01.msr_bitmap; in msr_write_intercepted_l01()
566 unsigned long *msr_bitmap_l0 = to_vmx(vcpu)->nested.vmcs02.msr_bitmap; in nested_vmx_prepare_msr_bitmap()
[all …]
vmx.h
465 static inline struct vcpu_vmx *to_vmx(struct kvm_vcpu *vcpu) in to_vmx() function
472 return &(to_vmx(vcpu)->pi_desc); in vcpu_to_pi_desc()
505 __vmx_flush_tlb(vcpu, to_vmx(vcpu)->vpid, invalidate_gpa); in vmx_flush_tlb()
evmcs.c
336 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_get_evmcs_version()
352 struct vcpu_vmx *vmx = to_vmx(vcpu); in nested_enable_evmcs()
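
Every hit above funnels through the accessor defined at vmx.h:465, which recovers the VMX-specific struct vcpu_vmx wrapper from the generic struct kvm_vcpu pointer that KVM code passes around. The standalone sketch below illustrates that container_of() pattern; the struct layouts and the vpid field here are trimmed placeholders, and the simplified container_of() macro stands in for the kernel's own, so this is a minimal illustration of the idiom rather than the full v5.4 definitions.

/*
 * Minimal sketch of the container_of() idiom behind to_vmx().
 * Struct contents are illustrative, not the kernel's full layout.
 */
#include <stddef.h>

/* Simplified stand-in for the kernel's container_of() macro. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct kvm_vcpu {
	int vcpu_id;
	/* ... generic KVM per-vCPU state ... */
};

struct vcpu_vmx {
	struct kvm_vcpu vcpu;	/* generic part embedded in the wrapper */
	int vpid;		/* VMX-specific state follows */
	/* ... */
};

/* Recover the VMX-specific wrapper from the generic vcpu pointer. */
static inline struct vcpu_vmx *to_vmx(struct kvm_vcpu *vcpu)
{
	return container_of(vcpu, struct vcpu_vmx, vcpu);
}

int main(void)
{
	struct vcpu_vmx vmx = { .vcpu = { .vcpu_id = 0 }, .vpid = 1 };
	struct kvm_vcpu *vcpu = &vmx.vcpu;	/* what callers typically hold */

	/* Same call shape as the hits above, e.g. to_vmx(vcpu)->vpid. */
	return to_vmx(vcpu)->vpid == 1 ? 0 : 1;
}

The usage mirrors the search results: callers receive only the struct kvm_vcpu pointer and call to_vmx(vcpu) whenever they need the VMX-specific fields, such as nested state, the EPT pointer, or the loaded VMCS.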