Lines Matching refs:vmx

31 struct vmx_pages *vmx = addr_gva2hva(vm, vmx_gva); in vcpu_alloc_vmx() local
34 vmx->vmxon = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); in vcpu_alloc_vmx()
35 vmx->vmxon_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmxon); in vcpu_alloc_vmx()
36 vmx->vmxon_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmxon); in vcpu_alloc_vmx()
39 vmx->vmcs = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); in vcpu_alloc_vmx()
40 vmx->vmcs_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmcs); in vcpu_alloc_vmx()
41 vmx->vmcs_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmcs); in vcpu_alloc_vmx()
44 vmx->msr = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); in vcpu_alloc_vmx()
45 vmx->msr_hva = addr_gva2hva(vm, (uintptr_t)vmx->msr); in vcpu_alloc_vmx()
46 vmx->msr_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->msr); in vcpu_alloc_vmx()
47 memset(vmx->msr_hva, 0, getpagesize()); in vcpu_alloc_vmx()
50 vmx->shadow_vmcs = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); in vcpu_alloc_vmx()
51 vmx->shadow_vmcs_hva = addr_gva2hva(vm, (uintptr_t)vmx->shadow_vmcs); in vcpu_alloc_vmx()
52 vmx->shadow_vmcs_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->shadow_vmcs); in vcpu_alloc_vmx()
55 vmx->vmread = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); in vcpu_alloc_vmx()
56 vmx->vmread_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmread); in vcpu_alloc_vmx()
57 vmx->vmread_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmread); in vcpu_alloc_vmx()
58 memset(vmx->vmread_hva, 0, getpagesize()); in vcpu_alloc_vmx()
60 vmx->vmwrite = (void *)vm_vaddr_alloc(vm, getpagesize(), 0x10000, 0, 0); in vcpu_alloc_vmx()
61 vmx->vmwrite_hva = addr_gva2hva(vm, (uintptr_t)vmx->vmwrite); in vcpu_alloc_vmx()
62 vmx->vmwrite_gpa = addr_gva2gpa(vm, (uintptr_t)vmx->vmwrite); in vcpu_alloc_vmx()
63 memset(vmx->vmwrite_hva, 0, getpagesize()); in vcpu_alloc_vmx()
66 return vmx; in vcpu_alloc_vmx()
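
The matches above show vcpu_alloc_vmx() allocating the vmxon, vmcs, MSR-bitmap, shadow-VMCS and VMREAD/VMWRITE-bitmap pages and recording each page's GVA, HVA and GPA. A minimal host-side usage sketch follows; vcpu_args_set and the out-parameter form of vcpu_alloc_vmx are assumptions from the wider KVM selftests framework, not shown in these matches:

#include "kvm_util.h"
#include "vmx.h"

/* Hypothetical helper: allocate the VMX pages for an existing VM and
 * hand their guest virtual address to the guest as its first argument. */
static void setup_nested_vmx(struct kvm_vm *vm, uint32_t vcpu_id)
{
	vm_vaddr_t vmx_pages_gva = 0;

	/* Allocates the pages listed above; the GVA of the struct
	 * vmx_pages is returned through the second parameter
	 * (assumed signature). */
	vcpu_alloc_vmx(vm, &vmx_pages_gva);

	vcpu_args_set(vm, vcpu_id, 1, vmx_pages_gva);
}
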
69 bool prepare_for_vmx_operation(struct vmx_pages *vmx) in prepare_for_vmx_operation() argument
106 *(uint32_t *)(vmx->vmxon) = vmcs_revision(); in prepare_for_vmx_operation()
107 if (vmxon(vmx->vmxon_gpa)) in prepare_for_vmx_operation()
111 *(uint32_t *)(vmx->vmcs) = vmcs_revision(); in prepare_for_vmx_operation()
112 if (vmclear(vmx->vmcs_gpa)) in prepare_for_vmx_operation()
115 if (vmptrld(vmx->vmcs_gpa)) in prepare_for_vmx_operation()
119 *(uint32_t *)(vmx->shadow_vmcs) = vmcs_revision() | 0x80000000ul; in prepare_for_vmx_operation()
120 if (vmclear(vmx->shadow_vmcs_gpa)) in prepare_for_vmx_operation()
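
prepare_for_vmx_operation() runs inside the guest: it stamps the VMCS revision id into the vmxon and vmcs pages (and revision id with bit 31 set into the shadow VMCS), then executes VMXON, VMCLEAR and VMPTRLD, returning false if any of them fails. A sketch of the guest-side check, assuming the selftests' GUEST_ASSERT macro:

static void l1_guest_code(struct vmx_pages *vmx_pages)
{
	/* Bail out of the test if entering VMX operation failed. */
	GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages));
}
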
129 static inline void init_vmcs_control_fields(struct vmx_pages *vmx) in init_vmcs_control_fields() argument
159 vmwrite(MSR_BITMAP, vmx->msr_gpa); in init_vmcs_control_fields()
160 vmwrite(VMREAD_BITMAP, vmx->vmread_gpa); in init_vmcs_control_fields()
161 vmwrite(VMWRITE_BITMAP, vmx->vmwrite_gpa); in init_vmcs_control_fields()
278 void prepare_vmcs(struct vmx_pages *vmx, void *guest_rip, void *guest_rsp) in prepare_vmcs() argument
280 init_vmcs_control_fields(vmx); in prepare_vmcs()
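
prepare_vmcs() calls init_vmcs_control_fields(), which (among other controls) points MSR_BITMAP, VMREAD_BITMAP and VMWRITE_BITMAP at the pages allocated by vcpu_alloc_vmx(), and then sets up the remaining VMCS state for the given L2 entry point and stack. A sketch of the typical guest-side flow, expanding the snippet above; the vmlaunch() wrapper returning 0 on success is an assumption from the selftests vmx header, not shown in these matches:

#define L2_GUEST_STACK_SIZE 64

static void l2_guest_code(void)
{
	/* L2 does nothing useful in this sketch. */
	for (;;)
		;
}

static void l1_guest_code(struct vmx_pages *vmx_pages)
{
	unsigned long l2_guest_stack[L2_GUEST_STACK_SIZE];

	GUEST_ASSERT(prepare_for_vmx_operation(vmx_pages));

	/* Fills the control fields (MSR/VMREAD/VMWRITE bitmaps included)
	 * and the state needed to run l2_guest_code on the given stack. */
	prepare_vmcs(vmx_pages, l2_guest_code,
		     &l2_guest_stack[L2_GUEST_STACK_SIZE]);

	GUEST_ASSERT(!vmlaunch());
}
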