Lines matching refs: vmcs12

57 #define SHADOW_FIELD_RO(x, y) { x, offsetof(struct vmcs12, y) },
64 #define SHADOW_FIELD_RW(x, y) { x, offsetof(struct vmcs12, y) },
328 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_ept_inject_page_fault() local
343 vmcs12->guest_physical_address = fault->address; in nested_ept_inject_page_fault()
370 static bool nested_vmx_is_page_fault_vmexit(struct vmcs12 *vmcs12, in nested_vmx_is_page_fault_vmexit() argument
375 bit = (vmcs12->exception_bitmap & (1u << PF_VECTOR)) != 0; in nested_vmx_is_page_fault_vmexit()
377 (error_code & vmcs12->page_fault_error_code_mask) != in nested_vmx_is_page_fault_vmexit()
378 vmcs12->page_fault_error_code_match; in nested_vmx_is_page_fault_vmexit()
389 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_check_exception() local
399 if (nested_vmx_is_page_fault_vmexit(vmcs12, in nested_vmx_check_exception()
404 } else if (vmcs12->exception_bitmap & (1u << nr)) { in nested_vmx_check_exception()
424 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in vmx_inject_page_fault_nested() local
428 if (nested_vmx_is_page_fault_vmexit(vmcs12, fault->error_code) && in vmx_inject_page_fault_nested()
430 vmcs12->vm_exit_intr_error_code = fault->error_code; in vmx_inject_page_fault_nested()
446 struct vmcs12 *vmcs12) in nested_vmx_check_io_bitmap_controls() argument
448 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_IO_BITMAPS)) in nested_vmx_check_io_bitmap_controls()
451 if (CC(!page_address_valid(vcpu, vmcs12->io_bitmap_a)) || in nested_vmx_check_io_bitmap_controls()
452 CC(!page_address_valid(vcpu, vmcs12->io_bitmap_b))) in nested_vmx_check_io_bitmap_controls()
459 struct vmcs12 *vmcs12) in nested_vmx_check_msr_bitmap_controls() argument
461 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) in nested_vmx_check_msr_bitmap_controls()
464 if (CC(!page_address_valid(vcpu, vmcs12->msr_bitmap))) in nested_vmx_check_msr_bitmap_controls()
471 struct vmcs12 *vmcs12) in nested_vmx_check_tpr_shadow_controls() argument
473 if (!nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) in nested_vmx_check_tpr_shadow_controls()
476 if (CC(!page_address_valid(vcpu, vmcs12->virtual_apic_page_addr))) in nested_vmx_check_tpr_shadow_controls()
562 struct vmcs12 *vmcs12) in nested_vmx_prepare_msr_bitmap() argument
571 !nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) in nested_vmx_prepare_msr_bitmap()
574 if (kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->msr_bitmap), map)) in nested_vmx_prepare_msr_bitmap()
586 if (nested_cpu_has_virt_x2apic_mode(vmcs12)) { in nested_vmx_prepare_msr_bitmap()
587 if (nested_cpu_has_apic_reg_virt(vmcs12)) { in nested_vmx_prepare_msr_bitmap()
606 if (nested_cpu_has_vid(vmcs12)) { in nested_vmx_prepare_msr_bitmap()
659 struct vmcs12 *vmcs12) in nested_cache_shadow_vmcs12() argument
662 struct vmcs12 *shadow; in nested_cache_shadow_vmcs12()
664 if (!nested_cpu_has_shadow_vmcs(vmcs12) || in nested_cache_shadow_vmcs12()
665 vmcs12->vmcs_link_pointer == -1ull) in nested_cache_shadow_vmcs12()
670 if (kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->vmcs_link_pointer), &map)) in nested_cache_shadow_vmcs12()
678 struct vmcs12 *vmcs12) in nested_flush_cached_shadow_vmcs12() argument
682 if (!nested_cpu_has_shadow_vmcs(vmcs12) || in nested_flush_cached_shadow_vmcs12()
683 vmcs12->vmcs_link_pointer == -1ull) in nested_flush_cached_shadow_vmcs12()
686 kvm_write_guest(vmx->vcpu.kvm, vmcs12->vmcs_link_pointer, in nested_flush_cached_shadow_vmcs12()
706 struct vmcs12 *vmcs12) in nested_vmx_check_apic_access_controls() argument
708 if (nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES) && in nested_vmx_check_apic_access_controls()
709 CC(!page_address_valid(vcpu, vmcs12->apic_access_addr))) in nested_vmx_check_apic_access_controls()
716 struct vmcs12 *vmcs12) in nested_vmx_check_apicv_controls() argument
718 if (!nested_cpu_has_virt_x2apic_mode(vmcs12) && in nested_vmx_check_apicv_controls()
719 !nested_cpu_has_apic_reg_virt(vmcs12) && in nested_vmx_check_apicv_controls()
720 !nested_cpu_has_vid(vmcs12) && in nested_vmx_check_apicv_controls()
721 !nested_cpu_has_posted_intr(vmcs12)) in nested_vmx_check_apicv_controls()
728 if (CC(nested_cpu_has_virt_x2apic_mode(vmcs12) && in nested_vmx_check_apicv_controls()
729 nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES))) in nested_vmx_check_apicv_controls()
736 if (CC(nested_cpu_has_vid(vmcs12) && !nested_exit_on_intr(vcpu))) in nested_vmx_check_apicv_controls()
746 if (nested_cpu_has_posted_intr(vmcs12) && in nested_vmx_check_apicv_controls()
747 (CC(!nested_cpu_has_vid(vmcs12)) || in nested_vmx_check_apicv_controls()
749 CC((vmcs12->posted_intr_nv & 0xff00)) || in nested_vmx_check_apicv_controls()
750 CC((vmcs12->posted_intr_desc_addr & 0x3f)) || in nested_vmx_check_apicv_controls()
751 CC((vmcs12->posted_intr_desc_addr >> cpuid_maxphyaddr(vcpu))))) in nested_vmx_check_apicv_controls()
755 if (CC(!nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW))) in nested_vmx_check_apicv_controls()
777 struct vmcs12 *vmcs12) in nested_vmx_check_exit_msr_switch_controls() argument
780 vmcs12->vm_exit_msr_load_count, in nested_vmx_check_exit_msr_switch_controls()
781 vmcs12->vm_exit_msr_load_addr)) || in nested_vmx_check_exit_msr_switch_controls()
783 vmcs12->vm_exit_msr_store_count, in nested_vmx_check_exit_msr_switch_controls()
784 vmcs12->vm_exit_msr_store_addr))) in nested_vmx_check_exit_msr_switch_controls()
791 struct vmcs12 *vmcs12) in nested_vmx_check_entry_msr_switch_controls() argument
794 vmcs12->vm_entry_msr_load_count, in nested_vmx_check_entry_msr_switch_controls()
795 vmcs12->vm_entry_msr_load_addr))) in nested_vmx_check_entry_msr_switch_controls()
802 struct vmcs12 *vmcs12) in nested_vmx_check_pml_controls() argument
804 if (!nested_cpu_has_pml(vmcs12)) in nested_vmx_check_pml_controls()
807 if (CC(!nested_cpu_has_ept(vmcs12)) || in nested_vmx_check_pml_controls()
808 CC(!page_address_valid(vcpu, vmcs12->pml_address))) in nested_vmx_check_pml_controls()
815 struct vmcs12 *vmcs12) in nested_vmx_check_unrestricted_guest_controls() argument
817 if (CC(nested_cpu_has2(vmcs12, SECONDARY_EXEC_UNRESTRICTED_GUEST) && in nested_vmx_check_unrestricted_guest_controls()
818 !nested_cpu_has_ept(vmcs12))) in nested_vmx_check_unrestricted_guest_controls()
824 struct vmcs12 *vmcs12) in nested_vmx_check_mode_based_ept_exec_controls() argument
826 if (CC(nested_cpu_has2(vmcs12, SECONDARY_EXEC_MODE_BASED_EPT_EXEC) && in nested_vmx_check_mode_based_ept_exec_controls()
827 !nested_cpu_has_ept(vmcs12))) in nested_vmx_check_mode_based_ept_exec_controls()
833 struct vmcs12 *vmcs12) in nested_vmx_check_shadow_vmcs_controls() argument
835 if (!nested_cpu_has_shadow_vmcs(vmcs12)) in nested_vmx_check_shadow_vmcs_controls()
838 if (CC(!page_address_valid(vcpu, vmcs12->vmread_bitmap)) || in nested_vmx_check_shadow_vmcs_controls()
839 CC(!page_address_valid(vcpu, vmcs12->vmwrite_bitmap))) in nested_vmx_check_shadow_vmcs_controls()
1035 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_has_guest_tlb_tag() local
1037 return nested_cpu_has_ept(vmcs12) || in nested_has_guest_tlb_tag()
1038 (nested_cpu_has_vpid(vmcs12) && to_vmx(vcpu)->nested.vpid02); in nested_has_guest_tlb_tag()
1369 struct vmcs12 *vmcs12 = get_vmcs12(&vmx->vcpu); in copy_shadow_to_vmcs12() local
1384 vmcs12_write_any(vmcs12, field.encoding, field.offset, val); in copy_shadow_to_vmcs12()
1404 struct vmcs12 *vmcs12 = get_vmcs12(&vmx->vcpu); in copy_vmcs12_to_shadow() local
1417 val = vmcs12_read_any(vmcs12, field.encoding, in copy_vmcs12_to_shadow()
1429 struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; in copy_enlightened_to_vmcs12() local
1433 vmcs12->tpr_threshold = evmcs->tpr_threshold; in copy_enlightened_to_vmcs12()
1434 vmcs12->guest_rip = evmcs->guest_rip; in copy_enlightened_to_vmcs12()
1438 vmcs12->guest_rsp = evmcs->guest_rsp; in copy_enlightened_to_vmcs12()
1439 vmcs12->guest_rflags = evmcs->guest_rflags; in copy_enlightened_to_vmcs12()
1440 vmcs12->guest_interruptibility_info = in copy_enlightened_to_vmcs12()
1446 vmcs12->cpu_based_vm_exec_control = in copy_enlightened_to_vmcs12()
1452 vmcs12->exception_bitmap = evmcs->exception_bitmap; in copy_enlightened_to_vmcs12()
1457 vmcs12->vm_entry_controls = evmcs->vm_entry_controls; in copy_enlightened_to_vmcs12()
1462 vmcs12->vm_entry_intr_info_field = in copy_enlightened_to_vmcs12()
1464 vmcs12->vm_entry_exception_error_code = in copy_enlightened_to_vmcs12()
1466 vmcs12->vm_entry_instruction_len = in copy_enlightened_to_vmcs12()
1472 vmcs12->host_ia32_pat = evmcs->host_ia32_pat; in copy_enlightened_to_vmcs12()
1473 vmcs12->host_ia32_efer = evmcs->host_ia32_efer; in copy_enlightened_to_vmcs12()
1474 vmcs12->host_cr0 = evmcs->host_cr0; in copy_enlightened_to_vmcs12()
1475 vmcs12->host_cr3 = evmcs->host_cr3; in copy_enlightened_to_vmcs12()
1476 vmcs12->host_cr4 = evmcs->host_cr4; in copy_enlightened_to_vmcs12()
1477 vmcs12->host_ia32_sysenter_esp = evmcs->host_ia32_sysenter_esp; in copy_enlightened_to_vmcs12()
1478 vmcs12->host_ia32_sysenter_eip = evmcs->host_ia32_sysenter_eip; in copy_enlightened_to_vmcs12()
1479 vmcs12->host_rip = evmcs->host_rip; in copy_enlightened_to_vmcs12()
1480 vmcs12->host_ia32_sysenter_cs = evmcs->host_ia32_sysenter_cs; in copy_enlightened_to_vmcs12()
1481 vmcs12->host_es_selector = evmcs->host_es_selector; in copy_enlightened_to_vmcs12()
1482 vmcs12->host_cs_selector = evmcs->host_cs_selector; in copy_enlightened_to_vmcs12()
1483 vmcs12->host_ss_selector = evmcs->host_ss_selector; in copy_enlightened_to_vmcs12()
1484 vmcs12->host_ds_selector = evmcs->host_ds_selector; in copy_enlightened_to_vmcs12()
1485 vmcs12->host_fs_selector = evmcs->host_fs_selector; in copy_enlightened_to_vmcs12()
1486 vmcs12->host_gs_selector = evmcs->host_gs_selector; in copy_enlightened_to_vmcs12()
1487 vmcs12->host_tr_selector = evmcs->host_tr_selector; in copy_enlightened_to_vmcs12()
1492 vmcs12->pin_based_vm_exec_control = in copy_enlightened_to_vmcs12()
1494 vmcs12->vm_exit_controls = evmcs->vm_exit_controls; in copy_enlightened_to_vmcs12()
1495 vmcs12->secondary_vm_exec_control = in copy_enlightened_to_vmcs12()
1501 vmcs12->io_bitmap_a = evmcs->io_bitmap_a; in copy_enlightened_to_vmcs12()
1502 vmcs12->io_bitmap_b = evmcs->io_bitmap_b; in copy_enlightened_to_vmcs12()
1507 vmcs12->msr_bitmap = evmcs->msr_bitmap; in copy_enlightened_to_vmcs12()
1512 vmcs12->guest_es_base = evmcs->guest_es_base; in copy_enlightened_to_vmcs12()
1513 vmcs12->guest_cs_base = evmcs->guest_cs_base; in copy_enlightened_to_vmcs12()
1514 vmcs12->guest_ss_base = evmcs->guest_ss_base; in copy_enlightened_to_vmcs12()
1515 vmcs12->guest_ds_base = evmcs->guest_ds_base; in copy_enlightened_to_vmcs12()
1516 vmcs12->guest_fs_base = evmcs->guest_fs_base; in copy_enlightened_to_vmcs12()
1517 vmcs12->guest_gs_base = evmcs->guest_gs_base; in copy_enlightened_to_vmcs12()
1518 vmcs12->guest_ldtr_base = evmcs->guest_ldtr_base; in copy_enlightened_to_vmcs12()
1519 vmcs12->guest_tr_base = evmcs->guest_tr_base; in copy_enlightened_to_vmcs12()
1520 vmcs12->guest_gdtr_base = evmcs->guest_gdtr_base; in copy_enlightened_to_vmcs12()
1521 vmcs12->guest_idtr_base = evmcs->guest_idtr_base; in copy_enlightened_to_vmcs12()
1522 vmcs12->guest_es_limit = evmcs->guest_es_limit; in copy_enlightened_to_vmcs12()
1523 vmcs12->guest_cs_limit = evmcs->guest_cs_limit; in copy_enlightened_to_vmcs12()
1524 vmcs12->guest_ss_limit = evmcs->guest_ss_limit; in copy_enlightened_to_vmcs12()
1525 vmcs12->guest_ds_limit = evmcs->guest_ds_limit; in copy_enlightened_to_vmcs12()
1526 vmcs12->guest_fs_limit = evmcs->guest_fs_limit; in copy_enlightened_to_vmcs12()
1527 vmcs12->guest_gs_limit = evmcs->guest_gs_limit; in copy_enlightened_to_vmcs12()
1528 vmcs12->guest_ldtr_limit = evmcs->guest_ldtr_limit; in copy_enlightened_to_vmcs12()
1529 vmcs12->guest_tr_limit = evmcs->guest_tr_limit; in copy_enlightened_to_vmcs12()
1530 vmcs12->guest_gdtr_limit = evmcs->guest_gdtr_limit; in copy_enlightened_to_vmcs12()
1531 vmcs12->guest_idtr_limit = evmcs->guest_idtr_limit; in copy_enlightened_to_vmcs12()
1532 vmcs12->guest_es_ar_bytes = evmcs->guest_es_ar_bytes; in copy_enlightened_to_vmcs12()
1533 vmcs12->guest_cs_ar_bytes = evmcs->guest_cs_ar_bytes; in copy_enlightened_to_vmcs12()
1534 vmcs12->guest_ss_ar_bytes = evmcs->guest_ss_ar_bytes; in copy_enlightened_to_vmcs12()
1535 vmcs12->guest_ds_ar_bytes = evmcs->guest_ds_ar_bytes; in copy_enlightened_to_vmcs12()
1536 vmcs12->guest_fs_ar_bytes = evmcs->guest_fs_ar_bytes; in copy_enlightened_to_vmcs12()
1537 vmcs12->guest_gs_ar_bytes = evmcs->guest_gs_ar_bytes; in copy_enlightened_to_vmcs12()
1538 vmcs12->guest_ldtr_ar_bytes = evmcs->guest_ldtr_ar_bytes; in copy_enlightened_to_vmcs12()
1539 vmcs12->guest_tr_ar_bytes = evmcs->guest_tr_ar_bytes; in copy_enlightened_to_vmcs12()
1540 vmcs12->guest_es_selector = evmcs->guest_es_selector; in copy_enlightened_to_vmcs12()
1541 vmcs12->guest_cs_selector = evmcs->guest_cs_selector; in copy_enlightened_to_vmcs12()
1542 vmcs12->guest_ss_selector = evmcs->guest_ss_selector; in copy_enlightened_to_vmcs12()
1543 vmcs12->guest_ds_selector = evmcs->guest_ds_selector; in copy_enlightened_to_vmcs12()
1544 vmcs12->guest_fs_selector = evmcs->guest_fs_selector; in copy_enlightened_to_vmcs12()
1545 vmcs12->guest_gs_selector = evmcs->guest_gs_selector; in copy_enlightened_to_vmcs12()
1546 vmcs12->guest_ldtr_selector = evmcs->guest_ldtr_selector; in copy_enlightened_to_vmcs12()
1547 vmcs12->guest_tr_selector = evmcs->guest_tr_selector; in copy_enlightened_to_vmcs12()
1552 vmcs12->tsc_offset = evmcs->tsc_offset; in copy_enlightened_to_vmcs12()
1553 vmcs12->virtual_apic_page_addr = evmcs->virtual_apic_page_addr; in copy_enlightened_to_vmcs12()
1554 vmcs12->xss_exit_bitmap = evmcs->xss_exit_bitmap; in copy_enlightened_to_vmcs12()
1559 vmcs12->cr0_guest_host_mask = evmcs->cr0_guest_host_mask; in copy_enlightened_to_vmcs12()
1560 vmcs12->cr4_guest_host_mask = evmcs->cr4_guest_host_mask; in copy_enlightened_to_vmcs12()
1561 vmcs12->cr0_read_shadow = evmcs->cr0_read_shadow; in copy_enlightened_to_vmcs12()
1562 vmcs12->cr4_read_shadow = evmcs->cr4_read_shadow; in copy_enlightened_to_vmcs12()
1563 vmcs12->guest_cr0 = evmcs->guest_cr0; in copy_enlightened_to_vmcs12()
1564 vmcs12->guest_cr3 = evmcs->guest_cr3; in copy_enlightened_to_vmcs12()
1565 vmcs12->guest_cr4 = evmcs->guest_cr4; in copy_enlightened_to_vmcs12()
1566 vmcs12->guest_dr7 = evmcs->guest_dr7; in copy_enlightened_to_vmcs12()
1571 vmcs12->host_fs_base = evmcs->host_fs_base; in copy_enlightened_to_vmcs12()
1572 vmcs12->host_gs_base = evmcs->host_gs_base; in copy_enlightened_to_vmcs12()
1573 vmcs12->host_tr_base = evmcs->host_tr_base; in copy_enlightened_to_vmcs12()
1574 vmcs12->host_gdtr_base = evmcs->host_gdtr_base; in copy_enlightened_to_vmcs12()
1575 vmcs12->host_idtr_base = evmcs->host_idtr_base; in copy_enlightened_to_vmcs12()
1576 vmcs12->host_rsp = evmcs->host_rsp; in copy_enlightened_to_vmcs12()
1581 vmcs12->ept_pointer = evmcs->ept_pointer; in copy_enlightened_to_vmcs12()
1582 vmcs12->virtual_processor_id = evmcs->virtual_processor_id; in copy_enlightened_to_vmcs12()
1587 vmcs12->vmcs_link_pointer = evmcs->vmcs_link_pointer; in copy_enlightened_to_vmcs12()
1588 vmcs12->guest_ia32_debugctl = evmcs->guest_ia32_debugctl; in copy_enlightened_to_vmcs12()
1589 vmcs12->guest_ia32_pat = evmcs->guest_ia32_pat; in copy_enlightened_to_vmcs12()
1590 vmcs12->guest_ia32_efer = evmcs->guest_ia32_efer; in copy_enlightened_to_vmcs12()
1591 vmcs12->guest_pdptr0 = evmcs->guest_pdptr0; in copy_enlightened_to_vmcs12()
1592 vmcs12->guest_pdptr1 = evmcs->guest_pdptr1; in copy_enlightened_to_vmcs12()
1593 vmcs12->guest_pdptr2 = evmcs->guest_pdptr2; in copy_enlightened_to_vmcs12()
1594 vmcs12->guest_pdptr3 = evmcs->guest_pdptr3; in copy_enlightened_to_vmcs12()
1595 vmcs12->guest_pending_dbg_exceptions = in copy_enlightened_to_vmcs12()
1597 vmcs12->guest_sysenter_esp = evmcs->guest_sysenter_esp; in copy_enlightened_to_vmcs12()
1598 vmcs12->guest_sysenter_eip = evmcs->guest_sysenter_eip; in copy_enlightened_to_vmcs12()
1599 vmcs12->guest_bndcfgs = evmcs->guest_bndcfgs; in copy_enlightened_to_vmcs12()
1600 vmcs12->guest_activity_state = evmcs->guest_activity_state; in copy_enlightened_to_vmcs12()
1601 vmcs12->guest_sysenter_cs = evmcs->guest_sysenter_cs; in copy_enlightened_to_vmcs12()
1649 struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; in copy_vmcs12_to_enlightened() local
1720 evmcs->guest_es_selector = vmcs12->guest_es_selector; in copy_vmcs12_to_enlightened()
1721 evmcs->guest_cs_selector = vmcs12->guest_cs_selector; in copy_vmcs12_to_enlightened()
1722 evmcs->guest_ss_selector = vmcs12->guest_ss_selector; in copy_vmcs12_to_enlightened()
1723 evmcs->guest_ds_selector = vmcs12->guest_ds_selector; in copy_vmcs12_to_enlightened()
1724 evmcs->guest_fs_selector = vmcs12->guest_fs_selector; in copy_vmcs12_to_enlightened()
1725 evmcs->guest_gs_selector = vmcs12->guest_gs_selector; in copy_vmcs12_to_enlightened()
1726 evmcs->guest_ldtr_selector = vmcs12->guest_ldtr_selector; in copy_vmcs12_to_enlightened()
1727 evmcs->guest_tr_selector = vmcs12->guest_tr_selector; in copy_vmcs12_to_enlightened()
1729 evmcs->guest_es_limit = vmcs12->guest_es_limit; in copy_vmcs12_to_enlightened()
1730 evmcs->guest_cs_limit = vmcs12->guest_cs_limit; in copy_vmcs12_to_enlightened()
1731 evmcs->guest_ss_limit = vmcs12->guest_ss_limit; in copy_vmcs12_to_enlightened()
1732 evmcs->guest_ds_limit = vmcs12->guest_ds_limit; in copy_vmcs12_to_enlightened()
1733 evmcs->guest_fs_limit = vmcs12->guest_fs_limit; in copy_vmcs12_to_enlightened()
1734 evmcs->guest_gs_limit = vmcs12->guest_gs_limit; in copy_vmcs12_to_enlightened()
1735 evmcs->guest_ldtr_limit = vmcs12->guest_ldtr_limit; in copy_vmcs12_to_enlightened()
1736 evmcs->guest_tr_limit = vmcs12->guest_tr_limit; in copy_vmcs12_to_enlightened()
1737 evmcs->guest_gdtr_limit = vmcs12->guest_gdtr_limit; in copy_vmcs12_to_enlightened()
1738 evmcs->guest_idtr_limit = vmcs12->guest_idtr_limit; in copy_vmcs12_to_enlightened()
1740 evmcs->guest_es_ar_bytes = vmcs12->guest_es_ar_bytes; in copy_vmcs12_to_enlightened()
1741 evmcs->guest_cs_ar_bytes = vmcs12->guest_cs_ar_bytes; in copy_vmcs12_to_enlightened()
1742 evmcs->guest_ss_ar_bytes = vmcs12->guest_ss_ar_bytes; in copy_vmcs12_to_enlightened()
1743 evmcs->guest_ds_ar_bytes = vmcs12->guest_ds_ar_bytes; in copy_vmcs12_to_enlightened()
1744 evmcs->guest_fs_ar_bytes = vmcs12->guest_fs_ar_bytes; in copy_vmcs12_to_enlightened()
1745 evmcs->guest_gs_ar_bytes = vmcs12->guest_gs_ar_bytes; in copy_vmcs12_to_enlightened()
1746 evmcs->guest_ldtr_ar_bytes = vmcs12->guest_ldtr_ar_bytes; in copy_vmcs12_to_enlightened()
1747 evmcs->guest_tr_ar_bytes = vmcs12->guest_tr_ar_bytes; in copy_vmcs12_to_enlightened()
1749 evmcs->guest_es_base = vmcs12->guest_es_base; in copy_vmcs12_to_enlightened()
1750 evmcs->guest_cs_base = vmcs12->guest_cs_base; in copy_vmcs12_to_enlightened()
1751 evmcs->guest_ss_base = vmcs12->guest_ss_base; in copy_vmcs12_to_enlightened()
1752 evmcs->guest_ds_base = vmcs12->guest_ds_base; in copy_vmcs12_to_enlightened()
1753 evmcs->guest_fs_base = vmcs12->guest_fs_base; in copy_vmcs12_to_enlightened()
1754 evmcs->guest_gs_base = vmcs12->guest_gs_base; in copy_vmcs12_to_enlightened()
1755 evmcs->guest_ldtr_base = vmcs12->guest_ldtr_base; in copy_vmcs12_to_enlightened()
1756 evmcs->guest_tr_base = vmcs12->guest_tr_base; in copy_vmcs12_to_enlightened()
1757 evmcs->guest_gdtr_base = vmcs12->guest_gdtr_base; in copy_vmcs12_to_enlightened()
1758 evmcs->guest_idtr_base = vmcs12->guest_idtr_base; in copy_vmcs12_to_enlightened()
1760 evmcs->guest_ia32_pat = vmcs12->guest_ia32_pat; in copy_vmcs12_to_enlightened()
1761 evmcs->guest_ia32_efer = vmcs12->guest_ia32_efer; in copy_vmcs12_to_enlightened()
1763 evmcs->guest_pdptr0 = vmcs12->guest_pdptr0; in copy_vmcs12_to_enlightened()
1764 evmcs->guest_pdptr1 = vmcs12->guest_pdptr1; in copy_vmcs12_to_enlightened()
1765 evmcs->guest_pdptr2 = vmcs12->guest_pdptr2; in copy_vmcs12_to_enlightened()
1766 evmcs->guest_pdptr3 = vmcs12->guest_pdptr3; in copy_vmcs12_to_enlightened()
1769 vmcs12->guest_pending_dbg_exceptions; in copy_vmcs12_to_enlightened()
1770 evmcs->guest_sysenter_esp = vmcs12->guest_sysenter_esp; in copy_vmcs12_to_enlightened()
1771 evmcs->guest_sysenter_eip = vmcs12->guest_sysenter_eip; in copy_vmcs12_to_enlightened()
1773 evmcs->guest_activity_state = vmcs12->guest_activity_state; in copy_vmcs12_to_enlightened()
1774 evmcs->guest_sysenter_cs = vmcs12->guest_sysenter_cs; in copy_vmcs12_to_enlightened()
1776 evmcs->guest_cr0 = vmcs12->guest_cr0; in copy_vmcs12_to_enlightened()
1777 evmcs->guest_cr3 = vmcs12->guest_cr3; in copy_vmcs12_to_enlightened()
1778 evmcs->guest_cr4 = vmcs12->guest_cr4; in copy_vmcs12_to_enlightened()
1779 evmcs->guest_dr7 = vmcs12->guest_dr7; in copy_vmcs12_to_enlightened()
1781 evmcs->guest_physical_address = vmcs12->guest_physical_address; in copy_vmcs12_to_enlightened()
1783 evmcs->vm_instruction_error = vmcs12->vm_instruction_error; in copy_vmcs12_to_enlightened()
1784 evmcs->vm_exit_reason = vmcs12->vm_exit_reason; in copy_vmcs12_to_enlightened()
1785 evmcs->vm_exit_intr_info = vmcs12->vm_exit_intr_info; in copy_vmcs12_to_enlightened()
1786 evmcs->vm_exit_intr_error_code = vmcs12->vm_exit_intr_error_code; in copy_vmcs12_to_enlightened()
1787 evmcs->idt_vectoring_info_field = vmcs12->idt_vectoring_info_field; in copy_vmcs12_to_enlightened()
1788 evmcs->idt_vectoring_error_code = vmcs12->idt_vectoring_error_code; in copy_vmcs12_to_enlightened()
1789 evmcs->vm_exit_instruction_len = vmcs12->vm_exit_instruction_len; in copy_vmcs12_to_enlightened()
1790 evmcs->vmx_instruction_info = vmcs12->vmx_instruction_info; in copy_vmcs12_to_enlightened()
1792 evmcs->exit_qualification = vmcs12->exit_qualification; in copy_vmcs12_to_enlightened()
1794 evmcs->guest_linear_address = vmcs12->guest_linear_address; in copy_vmcs12_to_enlightened()
1795 evmcs->guest_rsp = vmcs12->guest_rsp; in copy_vmcs12_to_enlightened()
1796 evmcs->guest_rflags = vmcs12->guest_rflags; in copy_vmcs12_to_enlightened()
1799 vmcs12->guest_interruptibility_info; in copy_vmcs12_to_enlightened()
1800 evmcs->cpu_based_vm_exec_control = vmcs12->cpu_based_vm_exec_control; in copy_vmcs12_to_enlightened()
1801 evmcs->vm_entry_controls = vmcs12->vm_entry_controls; in copy_vmcs12_to_enlightened()
1802 evmcs->vm_entry_intr_info_field = vmcs12->vm_entry_intr_info_field; in copy_vmcs12_to_enlightened()
1804 vmcs12->vm_entry_exception_error_code; in copy_vmcs12_to_enlightened()
1805 evmcs->vm_entry_instruction_len = vmcs12->vm_entry_instruction_len; in copy_vmcs12_to_enlightened()
1807 evmcs->guest_rip = vmcs12->guest_rip; in copy_vmcs12_to_enlightened()
1809 evmcs->guest_bndcfgs = vmcs12->guest_bndcfgs; in copy_vmcs12_to_enlightened()
1882 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_handle_enlightened_vmptrld() local
1883 memset(vmcs12, 0, sizeof(*vmcs12)); in nested_vmx_handle_enlightened_vmptrld()
1884 vmcs12->hdr.revision_id = VMCS12_REVISION; in nested_vmx_handle_enlightened_vmptrld()
1960 static u64 nested_vmx_calc_efer(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in nested_vmx_calc_efer() argument
1963 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_EFER)) in nested_vmx_calc_efer()
1964 return vmcs12->guest_ia32_efer; in nested_vmx_calc_efer()
1965 else if (vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) in nested_vmx_calc_efer()
2029 struct vmcs12 *vmcs12) in prepare_vmcs02_early_rare() argument
2036 if (nested_cpu_has_vpid(vmcs12) && vmx->nested.vpid02) in prepare_vmcs02_early_rare()
2043 static void prepare_vmcs02_early(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in prepare_vmcs02_early() argument
2046 u64 guest_efer = nested_vmx_calc_efer(vmx, vmcs12); in prepare_vmcs02_early()
2049 prepare_vmcs02_early_rare(vmx, vmcs12); in prepare_vmcs02_early()
2055 exec_control |= (vmcs12->pin_based_vm_exec_control & in prepare_vmcs02_early()
2059 if (nested_cpu_has_posted_intr(vmcs12)) { in prepare_vmcs02_early()
2060 vmx->nested.posted_intr_nv = vmcs12->posted_intr_nv; in prepare_vmcs02_early()
2074 exec_control |= vmcs12->cpu_based_vm_exec_control; in prepare_vmcs02_early()
2077 vmcs_write32(TPR_THRESHOLD, vmcs12->tpr_threshold); in prepare_vmcs02_early()
2117 if (nested_cpu_has(vmcs12, in prepare_vmcs02_early()
2119 vmcs12_exec_ctrl = vmcs12->secondary_vm_exec_control & in prepare_vmcs02_early()
2132 (vmcs12->guest_cr4 & X86_CR4_UMIP)) in prepare_vmcs02_early()
2137 vmcs12->guest_intr_status); in prepare_vmcs02_early()
2150 exec_control = (vmcs12->vm_entry_controls | vmx_vmentry_ctrl()) & in prepare_vmcs02_early()
2177 vmcs12->vm_entry_intr_info_field); in prepare_vmcs02_early()
2179 vmcs12->vm_entry_exception_error_code); in prepare_vmcs02_early()
2181 vmcs12->vm_entry_instruction_len); in prepare_vmcs02_early()
2183 vmcs12->guest_interruptibility_info); in prepare_vmcs02_early()
2185 !(vmcs12->guest_interruptibility_info & GUEST_INTR_STATE_NMI); in prepare_vmcs02_early()
2191 static void prepare_vmcs02_rare(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) in prepare_vmcs02_rare() argument
2197 vmcs_write16(GUEST_ES_SELECTOR, vmcs12->guest_es_selector); in prepare_vmcs02_rare()
2198 vmcs_write16(GUEST_CS_SELECTOR, vmcs12->guest_cs_selector); in prepare_vmcs02_rare()
2199 vmcs_write16(GUEST_SS_SELECTOR, vmcs12->guest_ss_selector); in prepare_vmcs02_rare()
2200 vmcs_write16(GUEST_DS_SELECTOR, vmcs12->guest_ds_selector); in prepare_vmcs02_rare()
2201 vmcs_write16(GUEST_FS_SELECTOR, vmcs12->guest_fs_selector); in prepare_vmcs02_rare()
2202 vmcs_write16(GUEST_GS_SELECTOR, vmcs12->guest_gs_selector); in prepare_vmcs02_rare()
2203 vmcs_write16(GUEST_LDTR_SELECTOR, vmcs12->guest_ldtr_selector); in prepare_vmcs02_rare()
2204 vmcs_write16(GUEST_TR_SELECTOR, vmcs12->guest_tr_selector); in prepare_vmcs02_rare()
2205 vmcs_write32(GUEST_ES_LIMIT, vmcs12->guest_es_limit); in prepare_vmcs02_rare()
2206 vmcs_write32(GUEST_CS_LIMIT, vmcs12->guest_cs_limit); in prepare_vmcs02_rare()
2207 vmcs_write32(GUEST_SS_LIMIT, vmcs12->guest_ss_limit); in prepare_vmcs02_rare()
2208 vmcs_write32(GUEST_DS_LIMIT, vmcs12->guest_ds_limit); in prepare_vmcs02_rare()
2209 vmcs_write32(GUEST_FS_LIMIT, vmcs12->guest_fs_limit); in prepare_vmcs02_rare()
2210 vmcs_write32(GUEST_GS_LIMIT, vmcs12->guest_gs_limit); in prepare_vmcs02_rare()
2211 vmcs_write32(GUEST_LDTR_LIMIT, vmcs12->guest_ldtr_limit); in prepare_vmcs02_rare()
2212 vmcs_write32(GUEST_TR_LIMIT, vmcs12->guest_tr_limit); in prepare_vmcs02_rare()
2213 vmcs_write32(GUEST_GDTR_LIMIT, vmcs12->guest_gdtr_limit); in prepare_vmcs02_rare()
2214 vmcs_write32(GUEST_IDTR_LIMIT, vmcs12->guest_idtr_limit); in prepare_vmcs02_rare()
2215 vmcs_write32(GUEST_CS_AR_BYTES, vmcs12->guest_cs_ar_bytes); in prepare_vmcs02_rare()
2216 vmcs_write32(GUEST_SS_AR_BYTES, vmcs12->guest_ss_ar_bytes); in prepare_vmcs02_rare()
2217 vmcs_write32(GUEST_ES_AR_BYTES, vmcs12->guest_es_ar_bytes); in prepare_vmcs02_rare()
2218 vmcs_write32(GUEST_DS_AR_BYTES, vmcs12->guest_ds_ar_bytes); in prepare_vmcs02_rare()
2219 vmcs_write32(GUEST_FS_AR_BYTES, vmcs12->guest_fs_ar_bytes); in prepare_vmcs02_rare()
2220 vmcs_write32(GUEST_GS_AR_BYTES, vmcs12->guest_gs_ar_bytes); in prepare_vmcs02_rare()
2221 vmcs_write32(GUEST_LDTR_AR_BYTES, vmcs12->guest_ldtr_ar_bytes); in prepare_vmcs02_rare()
2222 vmcs_write32(GUEST_TR_AR_BYTES, vmcs12->guest_tr_ar_bytes); in prepare_vmcs02_rare()
2223 vmcs_writel(GUEST_ES_BASE, vmcs12->guest_es_base); in prepare_vmcs02_rare()
2224 vmcs_writel(GUEST_CS_BASE, vmcs12->guest_cs_base); in prepare_vmcs02_rare()
2225 vmcs_writel(GUEST_SS_BASE, vmcs12->guest_ss_base); in prepare_vmcs02_rare()
2226 vmcs_writel(GUEST_DS_BASE, vmcs12->guest_ds_base); in prepare_vmcs02_rare()
2227 vmcs_writel(GUEST_FS_BASE, vmcs12->guest_fs_base); in prepare_vmcs02_rare()
2228 vmcs_writel(GUEST_GS_BASE, vmcs12->guest_gs_base); in prepare_vmcs02_rare()
2229 vmcs_writel(GUEST_LDTR_BASE, vmcs12->guest_ldtr_base); in prepare_vmcs02_rare()
2230 vmcs_writel(GUEST_TR_BASE, vmcs12->guest_tr_base); in prepare_vmcs02_rare()
2231 vmcs_writel(GUEST_GDTR_BASE, vmcs12->guest_gdtr_base); in prepare_vmcs02_rare()
2232 vmcs_writel(GUEST_IDTR_BASE, vmcs12->guest_idtr_base); in prepare_vmcs02_rare()
2237 vmcs_write32(GUEST_SYSENTER_CS, vmcs12->guest_sysenter_cs); in prepare_vmcs02_rare()
2239 vmcs12->guest_pending_dbg_exceptions); in prepare_vmcs02_rare()
2240 vmcs_writel(GUEST_SYSENTER_ESP, vmcs12->guest_sysenter_esp); in prepare_vmcs02_rare()
2241 vmcs_writel(GUEST_SYSENTER_EIP, vmcs12->guest_sysenter_eip); in prepare_vmcs02_rare()
2248 vmcs_write64(GUEST_PDPTR0, vmcs12->guest_pdptr0); in prepare_vmcs02_rare()
2249 vmcs_write64(GUEST_PDPTR1, vmcs12->guest_pdptr1); in prepare_vmcs02_rare()
2250 vmcs_write64(GUEST_PDPTR2, vmcs12->guest_pdptr2); in prepare_vmcs02_rare()
2251 vmcs_write64(GUEST_PDPTR3, vmcs12->guest_pdptr3); in prepare_vmcs02_rare()
2255 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS)) in prepare_vmcs02_rare()
2256 vmcs_write64(GUEST_BNDCFGS, vmcs12->guest_bndcfgs); in prepare_vmcs02_rare()
2259 if (nested_cpu_has_xsaves(vmcs12)) in prepare_vmcs02_rare()
2260 vmcs_write64(XSS_EXIT_BITMAP, vmcs12->xss_exit_bitmap); in prepare_vmcs02_rare()
2277 enable_ept ? vmcs12->page_fault_error_code_mask : 0); in prepare_vmcs02_rare()
2279 enable_ept ? vmcs12->page_fault_error_code_match : 0); in prepare_vmcs02_rare()
2282 vmcs_write64(EOI_EXIT_BITMAP0, vmcs12->eoi_exit_bitmap0); in prepare_vmcs02_rare()
2283 vmcs_write64(EOI_EXIT_BITMAP1, vmcs12->eoi_exit_bitmap1); in prepare_vmcs02_rare()
2284 vmcs_write64(EOI_EXIT_BITMAP2, vmcs12->eoi_exit_bitmap2); in prepare_vmcs02_rare()
2285 vmcs_write64(EOI_EXIT_BITMAP3, vmcs12->eoi_exit_bitmap3); in prepare_vmcs02_rare()
2305 static int prepare_vmcs02(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12, in prepare_vmcs02() argument
2313 prepare_vmcs02_rare(vmx, vmcs12); in prepare_vmcs02()
2322 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS)) { in prepare_vmcs02()
2323 kvm_set_dr(vcpu, 7, vmcs12->guest_dr7); in prepare_vmcs02()
2324 vmcs_write64(GUEST_IA32_DEBUGCTL, vmcs12->guest_ia32_debugctl); in prepare_vmcs02()
2330 !(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS))) in prepare_vmcs02()
2332 vmx_set_rflags(vcpu, vmcs12->guest_rflags); in prepare_vmcs02()
2339 vcpu->arch.cr0_guest_owned_bits &= ~vmcs12->cr0_guest_host_mask; in prepare_vmcs02()
2343 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PAT)) { in prepare_vmcs02()
2344 vmcs_write64(GUEST_IA32_PAT, vmcs12->guest_ia32_pat); in prepare_vmcs02()
2345 vcpu->arch.pat = vmcs12->guest_ia32_pat; in prepare_vmcs02()
2364 if (nested_cpu_has_vpid(vmcs12) && nested_has_guest_tlb_tag(vcpu)) { in prepare_vmcs02()
2365 if (vmcs12->virtual_processor_id != vmx->nested.last_vpid) { in prepare_vmcs02()
2366 vmx->nested.last_vpid = vmcs12->virtual_processor_id; in prepare_vmcs02()
2382 if (nested_cpu_has_ept(vmcs12)) in prepare_vmcs02()
2384 else if (nested_cpu_has2(vmcs12, in prepare_vmcs02()
2396 vmx_set_cr0(vcpu, vmcs12->guest_cr0); in prepare_vmcs02()
2397 vmcs_writel(CR0_READ_SHADOW, nested_read_cr0(vmcs12)); in prepare_vmcs02()
2399 vmx_set_cr4(vcpu, vmcs12->guest_cr4); in prepare_vmcs02()
2400 vmcs_writel(CR4_READ_SHADOW, nested_read_cr4(vmcs12)); in prepare_vmcs02()
2402 vcpu->arch.efer = nested_vmx_calc_efer(vmx, vmcs12); in prepare_vmcs02()
2417 if (nested_vmx_load_cr3(vcpu, vmcs12->guest_cr3, nested_cpu_has_ept(vmcs12), in prepare_vmcs02()
2422 if (load_guest_pdptrs_vmcs12 && nested_cpu_has_ept(vmcs12) && in prepare_vmcs02()
2424 vmcs_write64(GUEST_PDPTR0, vmcs12->guest_pdptr0); in prepare_vmcs02()
2425 vmcs_write64(GUEST_PDPTR1, vmcs12->guest_pdptr1); in prepare_vmcs02()
2426 vmcs_write64(GUEST_PDPTR2, vmcs12->guest_pdptr2); in prepare_vmcs02()
2427 vmcs_write64(GUEST_PDPTR3, vmcs12->guest_pdptr3); in prepare_vmcs02()
2433 kvm_rsp_write(vcpu, vmcs12->guest_rsp); in prepare_vmcs02()
2434 kvm_rip_write(vcpu, vmcs12->guest_rip); in prepare_vmcs02()
2438 static int nested_vmx_check_nmi_controls(struct vmcs12 *vmcs12) in nested_vmx_check_nmi_controls() argument
2440 if (CC(!nested_cpu_has_nmi_exiting(vmcs12) && in nested_vmx_check_nmi_controls()
2441 nested_cpu_has_virtual_nmis(vmcs12))) in nested_vmx_check_nmi_controls()
2444 if (CC(!nested_cpu_has_virtual_nmis(vmcs12) && in nested_vmx_check_nmi_controls()
2445 nested_cpu_has(vmcs12, CPU_BASED_VIRTUAL_NMI_PENDING))) in nested_vmx_check_nmi_controls()
2491 struct vmcs12 *vmcs12) in nested_check_vm_execution_controls() argument
2495 if (CC(!vmx_control_verify(vmcs12->pin_based_vm_exec_control, in nested_check_vm_execution_controls()
2498 CC(!vmx_control_verify(vmcs12->cpu_based_vm_exec_control, in nested_check_vm_execution_controls()
2503 if (nested_cpu_has(vmcs12, CPU_BASED_ACTIVATE_SECONDARY_CONTROLS) && in nested_check_vm_execution_controls()
2504 CC(!vmx_control_verify(vmcs12->secondary_vm_exec_control, in nested_check_vm_execution_controls()
2509 if (CC(vmcs12->cr3_target_count > nested_cpu_vmx_misc_cr3_count(vcpu)) || in nested_check_vm_execution_controls()
2510 nested_vmx_check_io_bitmap_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2511 nested_vmx_check_msr_bitmap_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2512 nested_vmx_check_tpr_shadow_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2513 nested_vmx_check_apic_access_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2514 nested_vmx_check_apicv_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2515 nested_vmx_check_nmi_controls(vmcs12) || in nested_check_vm_execution_controls()
2516 nested_vmx_check_pml_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2517 nested_vmx_check_unrestricted_guest_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2518 nested_vmx_check_mode_based_ept_exec_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2519 nested_vmx_check_shadow_vmcs_controls(vcpu, vmcs12) || in nested_check_vm_execution_controls()
2520 CC(nested_cpu_has_vpid(vmcs12) && !vmcs12->virtual_processor_id)) in nested_check_vm_execution_controls()
2523 if (!nested_cpu_has_preemption_timer(vmcs12) && in nested_check_vm_execution_controls()
2524 nested_cpu_has_save_preemption_timer(vmcs12)) in nested_check_vm_execution_controls()
2527 if (nested_cpu_has_ept(vmcs12) && in nested_check_vm_execution_controls()
2528 CC(!valid_ept_address(vcpu, vmcs12->ept_pointer))) in nested_check_vm_execution_controls()
2531 if (nested_cpu_has_vmfunc(vmcs12)) { in nested_check_vm_execution_controls()
2532 if (CC(vmcs12->vm_function_control & in nested_check_vm_execution_controls()
2536 if (nested_cpu_has_eptp_switching(vmcs12)) { in nested_check_vm_execution_controls()
2537 if (CC(!nested_cpu_has_ept(vmcs12)) || in nested_check_vm_execution_controls()
2538 CC(!page_address_valid(vcpu, vmcs12->eptp_list_address))) in nested_check_vm_execution_controls()
2550 struct vmcs12 *vmcs12) in nested_check_vm_exit_controls() argument
2554 if (CC(!vmx_control_verify(vmcs12->vm_exit_controls, in nested_check_vm_exit_controls()
2557 CC(nested_vmx_check_exit_msr_switch_controls(vcpu, vmcs12))) in nested_check_vm_exit_controls()
2567 struct vmcs12 *vmcs12) in nested_check_vm_entry_controls() argument
2571 if (CC(!vmx_control_verify(vmcs12->vm_entry_controls, in nested_check_vm_entry_controls()
2582 if (vmcs12->vm_entry_intr_info_field & INTR_INFO_VALID_MASK) { in nested_check_vm_entry_controls()
2583 u32 intr_info = vmcs12->vm_entry_intr_info_field; in nested_check_vm_entry_controls()
2588 bool urg = nested_cpu_has2(vmcs12, in nested_check_vm_entry_controls()
2590 bool prot_mode = !urg || vmcs12->guest_cr0 & X86_CR0_PE; in nested_check_vm_entry_controls()
2613 vmcs12->vm_entry_exception_error_code & GENMASK(31, 16))) in nested_check_vm_entry_controls()
2625 if (CC(vmcs12->vm_entry_instruction_len > 15) || in nested_check_vm_entry_controls()
2626 CC(vmcs12->vm_entry_instruction_len == 0 && in nested_check_vm_entry_controls()
2632 if (nested_vmx_check_entry_msr_switch_controls(vcpu, vmcs12)) in nested_check_vm_entry_controls()
2639 struct vmcs12 *vmcs12) in nested_vmx_check_controls() argument
2641 if (nested_check_vm_execution_controls(vcpu, vmcs12) || in nested_vmx_check_controls()
2642 nested_check_vm_exit_controls(vcpu, vmcs12) || in nested_vmx_check_controls()
2643 nested_check_vm_entry_controls(vcpu, vmcs12)) in nested_vmx_check_controls()
2650 struct vmcs12 *vmcs12) in nested_vmx_check_host_state() argument
2654 if (CC(!nested_host_cr0_valid(vcpu, vmcs12->host_cr0)) || in nested_vmx_check_host_state()
2655 CC(!nested_host_cr4_valid(vcpu, vmcs12->host_cr4)) || in nested_vmx_check_host_state()
2656 CC(!nested_cr3_valid(vcpu, vmcs12->host_cr3))) in nested_vmx_check_host_state()
2659 if (CC(is_noncanonical_address(vmcs12->host_ia32_sysenter_esp, vcpu)) || in nested_vmx_check_host_state()
2660 CC(is_noncanonical_address(vmcs12->host_ia32_sysenter_eip, vcpu))) in nested_vmx_check_host_state()
2663 if ((vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PAT) && in nested_vmx_check_host_state()
2664 CC(!kvm_pat_valid(vmcs12->host_ia32_pat))) in nested_vmx_check_host_state()
2674 if (CC(!(vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE)) || in nested_vmx_check_host_state()
2675 CC(!(vmcs12->host_cr4 & X86_CR4_PAE))) in nested_vmx_check_host_state()
2678 if (CC(vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) || in nested_vmx_check_host_state()
2679 CC(vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) || in nested_vmx_check_host_state()
2680 CC(vmcs12->host_cr4 & X86_CR4_PCIDE) || in nested_vmx_check_host_state()
2681 CC((vmcs12->host_rip) >> 32)) in nested_vmx_check_host_state()
2685 if (CC(vmcs12->host_cs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2686 CC(vmcs12->host_ss_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2687 CC(vmcs12->host_ds_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2688 CC(vmcs12->host_es_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2689 CC(vmcs12->host_fs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2690 CC(vmcs12->host_gs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2691 CC(vmcs12->host_tr_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || in nested_vmx_check_host_state()
2692 CC(vmcs12->host_cs_selector == 0) || in nested_vmx_check_host_state()
2693 CC(vmcs12->host_tr_selector == 0) || in nested_vmx_check_host_state()
2694 CC(vmcs12->host_ss_selector == 0 && !ia32e)) in nested_vmx_check_host_state()
2698 if (CC(is_noncanonical_address(vmcs12->host_fs_base, vcpu)) || in nested_vmx_check_host_state()
2699 CC(is_noncanonical_address(vmcs12->host_gs_base, vcpu)) || in nested_vmx_check_host_state()
2700 CC(is_noncanonical_address(vmcs12->host_gdtr_base, vcpu)) || in nested_vmx_check_host_state()
2701 CC(is_noncanonical_address(vmcs12->host_idtr_base, vcpu)) || in nested_vmx_check_host_state()
2702 CC(is_noncanonical_address(vmcs12->host_tr_base, vcpu)) || in nested_vmx_check_host_state()
2703 CC(is_noncanonical_address(vmcs12->host_rip, vcpu))) in nested_vmx_check_host_state()
2713 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_EFER) { in nested_vmx_check_host_state()
2714 if (CC(!kvm_valid_efer(vcpu, vmcs12->host_ia32_efer)) || in nested_vmx_check_host_state()
2715 CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LMA)) || in nested_vmx_check_host_state()
2716 CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LME))) in nested_vmx_check_host_state()
2724 struct vmcs12 *vmcs12) in nested_vmx_check_vmcs_link_ptr() argument
2727 struct vmcs12 *shadow; in nested_vmx_check_vmcs_link_ptr()
2730 if (vmcs12->vmcs_link_pointer == -1ull) in nested_vmx_check_vmcs_link_ptr()
2733 if (CC(!page_address_valid(vcpu, vmcs12->vmcs_link_pointer))) in nested_vmx_check_vmcs_link_ptr()
2736 if (CC(kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->vmcs_link_pointer), &map))) in nested_vmx_check_vmcs_link_ptr()
2742 CC(shadow->hdr.shadow_vmcs != nested_cpu_has_shadow_vmcs(vmcs12))) in nested_vmx_check_vmcs_link_ptr()
2752 static int nested_check_guest_non_reg_state(struct vmcs12 *vmcs12) in nested_check_guest_non_reg_state() argument
2754 if (CC(vmcs12->guest_activity_state != GUEST_ACTIVITY_ACTIVE && in nested_check_guest_non_reg_state()
2755 vmcs12->guest_activity_state != GUEST_ACTIVITY_HLT)) in nested_check_guest_non_reg_state()
2762 struct vmcs12 *vmcs12, in nested_vmx_check_guest_state() argument
2769 if (CC(!nested_guest_cr0_valid(vcpu, vmcs12->guest_cr0)) || in nested_vmx_check_guest_state()
2770 CC(!nested_guest_cr4_valid(vcpu, vmcs12->guest_cr4))) in nested_vmx_check_guest_state()
2773 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PAT) && in nested_vmx_check_guest_state()
2774 CC(!kvm_pat_valid(vmcs12->guest_ia32_pat))) in nested_vmx_check_guest_state()
2777 if (nested_vmx_check_vmcs_link_ptr(vcpu, vmcs12)) { in nested_vmx_check_guest_state()
2792 (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_EFER)) { in nested_vmx_check_guest_state()
2793 ia32e = (vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) != 0; in nested_vmx_check_guest_state()
2794 if (CC(!kvm_valid_efer(vcpu, vmcs12->guest_ia32_efer)) || in nested_vmx_check_guest_state()
2795 CC(ia32e != !!(vmcs12->guest_ia32_efer & EFER_LMA)) || in nested_vmx_check_guest_state()
2796 CC(((vmcs12->guest_cr0 & X86_CR0_PG) && in nested_vmx_check_guest_state()
2797 ia32e != !!(vmcs12->guest_ia32_efer & EFER_LME)))) in nested_vmx_check_guest_state()
2801 if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS) && in nested_vmx_check_guest_state()
2802 (CC(is_noncanonical_address(vmcs12->guest_bndcfgs & PAGE_MASK, vcpu)) || in nested_vmx_check_guest_state()
2803 CC((vmcs12->guest_bndcfgs & MSR_IA32_BNDCFGS_RSVD)))) in nested_vmx_check_guest_state()
2806 if (nested_check_guest_non_reg_state(vmcs12)) in nested_vmx_check_guest_state()
2918 struct vmcs12 *vmcs12);
2922 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_get_vmcs12_pages() local
2928 if (nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES)) { in nested_get_vmcs12_pages()
2939 page = kvm_vcpu_gpa_to_page(vcpu, vmcs12->apic_access_addr); in nested_get_vmcs12_pages()
2955 if (nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) { in nested_get_vmcs12_pages()
2958 if (!kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->virtual_apic_page_addr), map)) { in nested_get_vmcs12_pages()
2960 } else if (nested_cpu_has(vmcs12, CPU_BASED_CR8_LOAD_EXITING) && in nested_get_vmcs12_pages()
2961 nested_cpu_has(vmcs12, CPU_BASED_CR8_STORE_EXITING) && in nested_get_vmcs12_pages()
2962 !nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES)) { in nested_get_vmcs12_pages()
2981 if (nested_cpu_has_posted_intr(vmcs12)) { in nested_get_vmcs12_pages()
2984 if (!kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->posted_intr_desc_addr), map)) { in nested_get_vmcs12_pages()
2987 offset_in_page(vmcs12->posted_intr_desc_addr)); in nested_get_vmcs12_pages()
2989 pfn_to_hpa(map->pfn) + offset_in_page(vmcs12->posted_intr_desc_addr)); in nested_get_vmcs12_pages()
2992 if (nested_vmx_prepare_msr_bitmap(vcpu, vmcs12)) in nested_get_vmcs12_pages()
3030 struct vmcs12 *vmcs12);
3046 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_enter_non_root_mode() local
3056 if (!(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS)) in nested_vmx_enter_non_root_mode()
3059 !(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS)) in nested_vmx_enter_non_root_mode()
3083 prepare_vmcs02_early(vmx, vmcs12); in nested_vmx_enter_non_root_mode()
3094 if (nested_vmx_check_guest_state(vcpu, vmcs12, &exit_qual)) in nested_vmx_enter_non_root_mode()
3099 if (vmcs12->cpu_based_vm_exec_control & CPU_BASED_USE_TSC_OFFSETING) in nested_vmx_enter_non_root_mode()
3100 vcpu->arch.tsc_offset += vmcs12->tsc_offset; in nested_vmx_enter_non_root_mode()
3102 if (prepare_vmcs02(vcpu, vmcs12, &exit_qual)) in nested_vmx_enter_non_root_mode()
3108 vmcs12->vm_entry_msr_load_addr, in nested_vmx_enter_non_root_mode()
3109 vmcs12->vm_entry_msr_load_count); in nested_vmx_enter_non_root_mode()
3146 if (nested_cpu_has_preemption_timer(vmcs12)) in nested_vmx_enter_non_root_mode()
3163 if (vmcs12->cpu_based_vm_exec_control & CPU_BASED_USE_TSC_OFFSETING) in nested_vmx_enter_non_root_mode()
3164 vcpu->arch.tsc_offset -= vmcs12->tsc_offset; in nested_vmx_enter_non_root_mode()
3173 load_vmcs12_host_state(vcpu, vmcs12); in nested_vmx_enter_non_root_mode()
3174 vmcs12->vm_exit_reason = exit_reason | VMX_EXIT_REASONS_FAILED_VMENTRY; in nested_vmx_enter_non_root_mode()
3175 vmcs12->exit_qualification = exit_qual; in nested_vmx_enter_non_root_mode()
3187 struct vmcs12 *vmcs12; in nested_vmx_run() local
3201 vmcs12 = get_vmcs12(vcpu); in nested_vmx_run()
3209 if (vmcs12->hdr.shadow_vmcs) in nested_vmx_run()
3215 vmcs12->launch_state = !launch; in nested_vmx_run()
3234 if (vmcs12->launch_state == launch) in nested_vmx_run()
3239 if (nested_vmx_check_controls(vcpu, vmcs12)) in nested_vmx_run()
3242 if (nested_vmx_check_host_state(vcpu, vmcs12)) in nested_vmx_run()
3267 nested_cache_shadow_vmcs12(vcpu, vmcs12); in nested_vmx_run()
3274 if ((vmcs12->guest_activity_state == GUEST_ACTIVITY_HLT) && in nested_vmx_run()
3275 !(vmcs12->vm_entry_intr_info_field & INTR_INFO_VALID_MASK) && in nested_vmx_run()
3276 !(vmcs12->cpu_based_vm_exec_control & CPU_BASED_VIRTUAL_NMI_PENDING) && in nested_vmx_run()
3277 !((vmcs12->cpu_based_vm_exec_control & CPU_BASED_VIRTUAL_INTR_PENDING) && in nested_vmx_run()
3278 (vmcs12->guest_rflags & X86_EFLAGS_IF))) { in nested_vmx_run()
3312 vmcs12_guest_cr0(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) in vmcs12_guest_cr0() argument
3316 /*2*/ (vmcs12->guest_cr0 & vmcs12->cr0_guest_host_mask) | in vmcs12_guest_cr0()
3317 /*3*/ (vmcs_readl(CR0_READ_SHADOW) & ~(vmcs12->cr0_guest_host_mask | in vmcs12_guest_cr0()
3322 vmcs12_guest_cr4(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) in vmcs12_guest_cr4() argument
3326 /*2*/ (vmcs12->guest_cr4 & vmcs12->cr4_guest_host_mask) | in vmcs12_guest_cr4()
3327 /*3*/ (vmcs_readl(CR4_READ_SHADOW) & ~(vmcs12->cr4_guest_host_mask | in vmcs12_guest_cr4()
3332 struct vmcs12 *vmcs12) in vmcs12_save_pending_event() argument
3342 vmcs12->vm_exit_instruction_len = in vmcs12_save_pending_event()
3350 vmcs12->idt_vectoring_error_code = in vmcs12_save_pending_event()
3354 vmcs12->idt_vectoring_info_field = idt_vectoring; in vmcs12_save_pending_event()
3356 vmcs12->idt_vectoring_info_field = in vmcs12_save_pending_event()
3364 vmcs12->vm_entry_instruction_len = in vmcs12_save_pending_event()
3369 vmcs12->idt_vectoring_info_field = idt_vectoring; in vmcs12_save_pending_event()
3376 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_mark_vmcs12_pages_dirty() local
3384 if (nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) { in nested_mark_vmcs12_pages_dirty()
3385 gfn = vmcs12->virtual_apic_page_addr >> PAGE_SHIFT; in nested_mark_vmcs12_pages_dirty()
3389 if (nested_cpu_has_posted_intr(vmcs12)) { in nested_mark_vmcs12_pages_dirty()
3390 gfn = vmcs12->posted_intr_desc_addr >> PAGE_SHIFT; in nested_mark_vmcs12_pages_dirty()
3431 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_inject_exception_vmexit() local
3436 vmcs12->vm_exit_intr_error_code = vcpu->arch.exception.error_code; in nested_vmx_inject_exception_vmexit()
3445 if (!(vmcs12->idt_vectoring_info_field & VECTORING_INFO_VALID_MASK) && in nested_vmx_inject_exception_vmexit()
3573 struct vmcs12 *vmcs12) in sync_vmcs02_to_vmcs12_rare() argument
3577 vmcs12->guest_es_selector = vmcs_read16(GUEST_ES_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3578 vmcs12->guest_cs_selector = vmcs_read16(GUEST_CS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3579 vmcs12->guest_ss_selector = vmcs_read16(GUEST_SS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3580 vmcs12->guest_ds_selector = vmcs_read16(GUEST_DS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3581 vmcs12->guest_fs_selector = vmcs_read16(GUEST_FS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3582 vmcs12->guest_gs_selector = vmcs_read16(GUEST_GS_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3583 vmcs12->guest_ldtr_selector = vmcs_read16(GUEST_LDTR_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3584 vmcs12->guest_tr_selector = vmcs_read16(GUEST_TR_SELECTOR); in sync_vmcs02_to_vmcs12_rare()
3585 vmcs12->guest_es_limit = vmcs_read32(GUEST_ES_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3586 vmcs12->guest_cs_limit = vmcs_read32(GUEST_CS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3587 vmcs12->guest_ss_limit = vmcs_read32(GUEST_SS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3588 vmcs12->guest_ds_limit = vmcs_read32(GUEST_DS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3589 vmcs12->guest_fs_limit = vmcs_read32(GUEST_FS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3590 vmcs12->guest_gs_limit = vmcs_read32(GUEST_GS_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3591 vmcs12->guest_ldtr_limit = vmcs_read32(GUEST_LDTR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3592 vmcs12->guest_tr_limit = vmcs_read32(GUEST_TR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3593 vmcs12->guest_gdtr_limit = vmcs_read32(GUEST_GDTR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3594 vmcs12->guest_idtr_limit = vmcs_read32(GUEST_IDTR_LIMIT); in sync_vmcs02_to_vmcs12_rare()
3595 vmcs12->guest_es_ar_bytes = vmcs_read32(GUEST_ES_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
3596 vmcs12->guest_ds_ar_bytes = vmcs_read32(GUEST_DS_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
3597 vmcs12->guest_fs_ar_bytes = vmcs_read32(GUEST_FS_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
3598 vmcs12->guest_gs_ar_bytes = vmcs_read32(GUEST_GS_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
3599 vmcs12->guest_ldtr_ar_bytes = vmcs_read32(GUEST_LDTR_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
3600 vmcs12->guest_tr_ar_bytes = vmcs_read32(GUEST_TR_AR_BYTES); in sync_vmcs02_to_vmcs12_rare()
3601 vmcs12->guest_es_base = vmcs_readl(GUEST_ES_BASE); in sync_vmcs02_to_vmcs12_rare()
3602 vmcs12->guest_cs_base = vmcs_readl(GUEST_CS_BASE); in sync_vmcs02_to_vmcs12_rare()
3603 vmcs12->guest_ss_base = vmcs_readl(GUEST_SS_BASE); in sync_vmcs02_to_vmcs12_rare()
3604 vmcs12->guest_ds_base = vmcs_readl(GUEST_DS_BASE); in sync_vmcs02_to_vmcs12_rare()
3605 vmcs12->guest_fs_base = vmcs_readl(GUEST_FS_BASE); in sync_vmcs02_to_vmcs12_rare()
3606 vmcs12->guest_gs_base = vmcs_readl(GUEST_GS_BASE); in sync_vmcs02_to_vmcs12_rare()
3607 vmcs12->guest_ldtr_base = vmcs_readl(GUEST_LDTR_BASE); in sync_vmcs02_to_vmcs12_rare()
3608 vmcs12->guest_tr_base = vmcs_readl(GUEST_TR_BASE); in sync_vmcs02_to_vmcs12_rare()
3609 vmcs12->guest_gdtr_base = vmcs_readl(GUEST_GDTR_BASE); in sync_vmcs02_to_vmcs12_rare()
3610 vmcs12->guest_idtr_base = vmcs_readl(GUEST_IDTR_BASE); in sync_vmcs02_to_vmcs12_rare()
3611 vmcs12->guest_pending_dbg_exceptions = in sync_vmcs02_to_vmcs12_rare()
3614 vmcs12->guest_bndcfgs = vmcs_read64(GUEST_BNDCFGS); in sync_vmcs02_to_vmcs12_rare()
3620 struct vmcs12 *vmcs12) in copy_vmcs02_to_vmcs12_rare() argument
3635 sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in copy_vmcs02_to_vmcs12_rare()
3648 static void sync_vmcs02_to_vmcs12(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) in sync_vmcs02_to_vmcs12() argument
3653 sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in sync_vmcs02_to_vmcs12()
3657 vmcs12->guest_cr0 = vmcs12_guest_cr0(vcpu, vmcs12); in sync_vmcs02_to_vmcs12()
3658 vmcs12->guest_cr4 = vmcs12_guest_cr4(vcpu, vmcs12); in sync_vmcs02_to_vmcs12()
3660 vmcs12->guest_rsp = kvm_rsp_read(vcpu); in sync_vmcs02_to_vmcs12()
3661 vmcs12->guest_rip = kvm_rip_read(vcpu); in sync_vmcs02_to_vmcs12()
3662 vmcs12->guest_rflags = vmcs_readl(GUEST_RFLAGS); in sync_vmcs02_to_vmcs12()
3664 vmcs12->guest_cs_ar_bytes = vmcs_read32(GUEST_CS_AR_BYTES); in sync_vmcs02_to_vmcs12()
3665 vmcs12->guest_ss_ar_bytes = vmcs_read32(GUEST_SS_AR_BYTES); in sync_vmcs02_to_vmcs12()
3667 vmcs12->guest_sysenter_cs = vmcs_read32(GUEST_SYSENTER_CS); in sync_vmcs02_to_vmcs12()
3668 vmcs12->guest_sysenter_esp = vmcs_readl(GUEST_SYSENTER_ESP); in sync_vmcs02_to_vmcs12()
3669 vmcs12->guest_sysenter_eip = vmcs_readl(GUEST_SYSENTER_EIP); in sync_vmcs02_to_vmcs12()
3671 vmcs12->guest_interruptibility_info = in sync_vmcs02_to_vmcs12()
3675 vmcs12->guest_activity_state = GUEST_ACTIVITY_HLT; in sync_vmcs02_to_vmcs12()
3677 vmcs12->guest_activity_state = GUEST_ACTIVITY_ACTIVE; in sync_vmcs02_to_vmcs12()
3679 if (nested_cpu_has_preemption_timer(vmcs12) && in sync_vmcs02_to_vmcs12()
3680 vmcs12->vm_exit_controls & VM_EXIT_SAVE_VMX_PREEMPTION_TIMER) in sync_vmcs02_to_vmcs12()
3681 vmcs12->vmx_preemption_timer_value = in sync_vmcs02_to_vmcs12()
3693 vmcs12->guest_cr3 = vmcs_readl(GUEST_CR3); in sync_vmcs02_to_vmcs12()
3694 if (nested_cpu_has_ept(vmcs12) && is_pae_paging(vcpu)) { in sync_vmcs02_to_vmcs12()
3695 vmcs12->guest_pdptr0 = vmcs_read64(GUEST_PDPTR0); in sync_vmcs02_to_vmcs12()
3696 vmcs12->guest_pdptr1 = vmcs_read64(GUEST_PDPTR1); in sync_vmcs02_to_vmcs12()
3697 vmcs12->guest_pdptr2 = vmcs_read64(GUEST_PDPTR2); in sync_vmcs02_to_vmcs12()
3698 vmcs12->guest_pdptr3 = vmcs_read64(GUEST_PDPTR3); in sync_vmcs02_to_vmcs12()
3702 vmcs12->guest_linear_address = vmcs_readl(GUEST_LINEAR_ADDRESS); in sync_vmcs02_to_vmcs12()
3704 if (nested_cpu_has_vid(vmcs12)) in sync_vmcs02_to_vmcs12()
3705 vmcs12->guest_intr_status = vmcs_read16(GUEST_INTR_STATUS); in sync_vmcs02_to_vmcs12()
3707 vmcs12->vm_entry_controls = in sync_vmcs02_to_vmcs12()
3708 (vmcs12->vm_entry_controls & ~VM_ENTRY_IA32E_MODE) | in sync_vmcs02_to_vmcs12()
3711 if (vmcs12->vm_exit_controls & VM_EXIT_SAVE_DEBUG_CONTROLS) in sync_vmcs02_to_vmcs12()
3712 kvm_get_dr(vcpu, 7, (unsigned long *)&vmcs12->guest_dr7); in sync_vmcs02_to_vmcs12()
3714 if (vmcs12->vm_exit_controls & VM_EXIT_SAVE_IA32_EFER) in sync_vmcs02_to_vmcs12()
3715 vmcs12->guest_ia32_efer = vcpu->arch.efer; in sync_vmcs02_to_vmcs12()
3729 static void prepare_vmcs12(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12, in prepare_vmcs12() argument
3734 vmcs12->vm_exit_reason = exit_reason; in prepare_vmcs12()
3735 vmcs12->exit_qualification = exit_qualification; in prepare_vmcs12()
3736 vmcs12->vm_exit_intr_info = exit_intr_info; in prepare_vmcs12()
3738 vmcs12->idt_vectoring_info_field = 0; in prepare_vmcs12()
3739 vmcs12->vm_exit_instruction_len = vmcs_read32(VM_EXIT_INSTRUCTION_LEN); in prepare_vmcs12()
3740 vmcs12->vmx_instruction_info = vmcs_read32(VMX_INSTRUCTION_INFO); in prepare_vmcs12()
3742 if (!(vmcs12->vm_exit_reason & VMX_EXIT_REASONS_FAILED_VMENTRY)) { in prepare_vmcs12()
3743 vmcs12->launch_state = 1; in prepare_vmcs12()
3747 vmcs12->vm_entry_intr_info_field &= ~INTR_INFO_VALID_MASK; in prepare_vmcs12()
3753 vmcs12_save_pending_event(vcpu, vmcs12); in prepare_vmcs12()
3762 vmcs12->vm_exit_msr_store_addr, in prepare_vmcs12()
3763 vmcs12->vm_exit_msr_store_count)) in prepare_vmcs12()
3787 struct vmcs12 *vmcs12) in load_vmcs12_host_state() argument
3792 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_EFER) in load_vmcs12_host_state()
3793 vcpu->arch.efer = vmcs12->host_ia32_efer; in load_vmcs12_host_state()
3794 else if (vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) in load_vmcs12_host_state()
3800 kvm_rsp_write(vcpu, vmcs12->host_rsp); in load_vmcs12_host_state()
3801 kvm_rip_write(vcpu, vmcs12->host_rip); in load_vmcs12_host_state()
3813 vmx_set_cr0(vcpu, vmcs12->host_cr0); in load_vmcs12_host_state()
3817 vmx_set_cr4(vcpu, vmcs12->host_cr4); in load_vmcs12_host_state()
3825 if (nested_vmx_load_cr3(vcpu, vmcs12->host_cr3, false, &entry_failure_code)) in load_vmcs12_host_state()
3846 (!nested_cpu_has_vpid(vmcs12) || !nested_has_guest_tlb_tag(vcpu))) { in load_vmcs12_host_state()
3850 vmcs_write32(GUEST_SYSENTER_CS, vmcs12->host_ia32_sysenter_cs); in load_vmcs12_host_state()
3851 vmcs_writel(GUEST_SYSENTER_ESP, vmcs12->host_ia32_sysenter_esp); in load_vmcs12_host_state()
3852 vmcs_writel(GUEST_SYSENTER_EIP, vmcs12->host_ia32_sysenter_eip); in load_vmcs12_host_state()
3853 vmcs_writel(GUEST_IDTR_BASE, vmcs12->host_idtr_base); in load_vmcs12_host_state()
3854 vmcs_writel(GUEST_GDTR_BASE, vmcs12->host_gdtr_base); in load_vmcs12_host_state()
3859 if (vmcs12->vm_exit_controls & VM_EXIT_CLEAR_BNDCFGS) in load_vmcs12_host_state()
3862 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PAT) { in load_vmcs12_host_state()
3863 vmcs_write64(GUEST_IA32_PAT, vmcs12->host_ia32_pat); in load_vmcs12_host_state()
3864 vcpu->arch.pat = vmcs12->host_ia32_pat; in load_vmcs12_host_state()
3866 if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PERF_GLOBAL_CTRL) in load_vmcs12_host_state()
3868 vmcs12->host_ia32_perf_global_ctrl); in load_vmcs12_host_state()
3875 .selector = vmcs12->host_cs_selector, in load_vmcs12_host_state()
3881 if (vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) in load_vmcs12_host_state()
3895 seg.selector = vmcs12->host_ds_selector; in load_vmcs12_host_state()
3897 seg.selector = vmcs12->host_es_selector; in load_vmcs12_host_state()
3899 seg.selector = vmcs12->host_ss_selector; in load_vmcs12_host_state()
3901 seg.selector = vmcs12->host_fs_selector; in load_vmcs12_host_state()
3902 seg.base = vmcs12->host_fs_base; in load_vmcs12_host_state()
3904 seg.selector = vmcs12->host_gs_selector; in load_vmcs12_host_state()
3905 seg.base = vmcs12->host_gs_base; in load_vmcs12_host_state()
3908 .base = vmcs12->host_tr_base, in load_vmcs12_host_state()
3910 .selector = vmcs12->host_tr_selector, in load_vmcs12_host_state()
3922 if (nested_vmx_load_msr(vcpu, vmcs12->vm_exit_msr_load_addr, in load_vmcs12_host_state()
3923 vmcs12->vm_exit_msr_load_count)) in load_vmcs12_host_state()
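
The load_vmcs12_host_state() references above restore L1's state from the host-state area of vmcs12: EFER, RSP/RIP, CR0/CR3/CR4, SYSENTER MSRs, descriptor-table bases, segment selectors, and optionally BNDCFGS, PAT and PERF_GLOBAL_CTRL, followed by the VM-exit MSR-load list. The EFER selection is the least obvious piece; a self-contained sketch of just that decision (control and EFER bit values from the Intel SDM):

#include <stdint.h>

#define VM_EXIT_HOST_ADDR_SPACE_SIZE	0x00000200u	/* exit-control bit 9 */
#define VM_EXIT_LOAD_IA32_EFER		0x00200000u	/* exit-control bit 21 */

#define EFER_LME	(1ull << 8)
#define EFER_LMA	(1ull << 10)

/*
 * Model: EFER is either loaded verbatim from the host-state area, or its
 * long-mode bits are synthesized from the "host address-space size" control.
 */
static uint64_t host_efer_on_exit(uint32_t vm_exit_controls,
				  uint64_t host_ia32_efer,
				  uint64_t current_efer)
{
	if (vm_exit_controls & VM_EXIT_LOAD_IA32_EFER)
		return host_ia32_efer;
	if (vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE)
		return current_efer | EFER_LMA | EFER_LME;
	return current_efer & ~(EFER_LMA | EFER_LME);
}
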
3952 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_restore_host_state() local
3960 if (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS) { in nested_vmx_restore_host_state()
4014 for (i = 0; i < vmcs12->vm_entry_msr_load_count; i++) { in nested_vmx_restore_host_state()
4015 gpa = vmcs12->vm_entry_msr_load_addr + (i * sizeof(g)); in nested_vmx_restore_host_state()
4023 for (j = 0; j < vmcs12->vm_exit_msr_load_count; j++) { in nested_vmx_restore_host_state()
4024 gpa = vmcs12->vm_exit_msr_load_addr + (j * sizeof(h)); in nested_vmx_restore_host_state()
4067 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_vmexit() local
4074 if (nested_cpu_has_preemption_timer(vmcs12)) in nested_vmx_vmexit()
4077 if (vmcs12->cpu_based_vm_exec_control & CPU_BASED_USE_TSC_OFFSETING) in nested_vmx_vmexit()
4078 vcpu->arch.tsc_offset -= vmcs12->tsc_offset; in nested_vmx_vmexit()
4081 sync_vmcs02_to_vmcs12(vcpu, vmcs12); in nested_vmx_vmexit()
4084 prepare_vmcs12(vcpu, vmcs12, exit_reason, exit_intr_info, in nested_vmx_vmexit()
4096 nested_flush_cached_shadow_vmcs12(vcpu, vmcs12); in nested_vmx_vmexit()
4122 } else if (!nested_cpu_has_ept(vmcs12) && in nested_vmx_vmexit()
4123 nested_cpu_has2(vmcs12, in nested_vmx_vmexit()
4163 vmcs12->vm_exit_intr_info = irq | in nested_vmx_vmexit()
4168 trace_kvm_nested_vmexit_inject(vmcs12->vm_exit_reason, in nested_vmx_vmexit()
4169 vmcs12->exit_qualification, in nested_vmx_vmexit()
4170 vmcs12->idt_vectoring_info_field, in nested_vmx_vmexit()
4171 vmcs12->vm_exit_intr_info, in nested_vmx_vmexit()
4172 vmcs12->vm_exit_intr_error_code, in nested_vmx_vmexit()
4175 load_vmcs12_host_state(vcpu, vmcs12); in nested_vmx_vmexit()
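
Read in order, the nested_vmx_vmexit() references above give the VM-exit sequence: stop the preemption timer if L1 enabled it, undo the TSC offset applied for L2, sync vmcs02 back into vmcs12, fill in the exit fields, flush the cached shadow vmcs12 to guest memory, trace the injected exit, then load L1's host state. The TSC-offset adjustment pairs with the entry path; a small model of that pairing (the struct is hypothetical, the control-bit value is from the SDM, and the macro spelling follows the listing):

#include <stdint.h>

#define CPU_BASED_USE_TSC_OFFSETING	0x00000008u	/* primary exec-control bit 3 */

/* Hypothetical container for the offsets involved (illustration only). */
struct tsc_state {
	uint64_t l1_tsc_offset;		/* offset L1 itself runs with */
	uint64_t vmcs12_tsc_offset;	/* extra offset L1 requested via vmcs12 */
	uint64_t active_offset;		/* what is currently programmed */
};

/* On nested VM-entry the offsets stack; on VM-exit the L2 part is removed. */
static void model_enter_l2(struct tsc_state *t, uint32_t exec_ctrl)
{
	t->active_offset = t->l1_tsc_offset;
	if (exec_ctrl & CPU_BASED_USE_TSC_OFFSETING)
		t->active_offset += t->vmcs12_tsc_offset;
}

static void model_exit_to_l1(struct tsc_state *t, uint32_t exec_ctrl)
{
	if (exec_ctrl & CPU_BASED_USE_TSC_OFFSETING)
		t->active_offset -= t->vmcs12_tsc_offset;
}
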
4575 vmptr + offsetof(struct vmcs12, in handle_vmclear()
4606 struct vmcs12 *vmcs12; in handle_vmread() local
4617 vmcs12 = get_vmcs12(vcpu); in handle_vmread()
4625 vmcs12 = get_shadow_vmcs12(vcpu); in handle_vmread()
4637 copy_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in handle_vmread()
4640 field_value = vmcs12_read_any(vmcs12, field, offset); in handle_vmread()
4704 struct vmcs12 *vmcs12; in handle_vmwrite() local
4739 vmcs12 = get_vmcs12(vcpu); in handle_vmwrite()
4746 copy_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in handle_vmwrite()
4754 vmcs12 = get_shadow_vmcs12(vcpu); in handle_vmwrite()
4773 vmcs12_write_any(vmcs12, field, offset, field_value); in handle_vmwrite()
4840 struct vmcs12 *new_vmcs12; in handle_vmptrld()
5042 struct vmcs12 *vmcs12) in nested_vmx_eptp_switching() argument
5049 if (!nested_cpu_has_eptp_switching(vmcs12) || in nested_vmx_eptp_switching()
5050 !nested_cpu_has_ept(vmcs12)) in nested_vmx_eptp_switching()
5057 if (kvm_vcpu_read_guest_page(vcpu, vmcs12->eptp_list_address >> PAGE_SHIFT, in nested_vmx_eptp_switching()
5067 if (vmcs12->ept_pointer != address) { in nested_vmx_eptp_switching()
5074 vmcs12->ept_pointer = address; in nested_vmx_eptp_switching()
5089 struct vmcs12 *vmcs12; in handle_vmfunc() local
5102 vmcs12 = get_vmcs12(vcpu); in handle_vmfunc()
5103 if ((vmcs12->vm_function_control & (1 << function)) == 0) in handle_vmfunc()
5108 if (nested_vmx_eptp_switching(vcpu, vmcs12)) in handle_vmfunc()
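
handle_vmfunc() and nested_vmx_eptp_switching() above implement the single architected VM function, EPTP switching (function 0): the function must be enabled in vmcs12->vm_function_control, EPTP switching additionally requires EPT and the eptp-switching secondary control, the requested index selects an 8-byte entry in the 4 KiB EPTP list at vmcs12->eptp_list_address, and a differing entry replaces vmcs12->ept_pointer. A standalone model of the lookup, with a hypothetical read_gpa() callback standing in for the guest-memory read:

#include <stdint.h>
#include <stdbool.h>
#include <stddef.h>

#define EPTP_LIST_ENTRIES	512	/* 4 KiB list of 8-byte EPTPs */

/* Hypothetical guest-physical-memory read callback (illustration only). */
typedef bool (*read_gpa_fn)(uint64_t gpa, void *data, size_t len);

/* Returns true if the switch succeeded; real code also rebuilds MMU state. */
static bool model_eptp_switch(uint64_t vm_function_control,
			      uint64_t eptp_list_address, uint32_t index,
			      uint64_t *ept_pointer, read_gpa_fn read_gpa)
{
	uint64_t new_eptp;

	if (!(vm_function_control & (1ull << 0)))	/* EPTP switching enabled? */
		return false;
	if (index >= EPTP_LIST_ENTRIES)
		return false;
	if (!read_gpa(eptp_list_address + index * sizeof(new_eptp),
		      &new_eptp, sizeof(new_eptp)))
		return false;

	if (*ept_pointer != new_eptp)
		*ept_pointer = new_eptp;
	return true;
}
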
5125 struct vmcs12 *vmcs12) in nested_vmx_exit_handled_io() argument
5133 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_IO_BITMAPS)) in nested_vmx_exit_handled_io()
5134 return nested_cpu_has(vmcs12, CPU_BASED_UNCOND_IO_EXITING); in nested_vmx_exit_handled_io()
5146 bitmap = vmcs12->io_bitmap_a; in nested_vmx_exit_handled_io()
5148 bitmap = vmcs12->io_bitmap_b; in nested_vmx_exit_handled_io()
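
nested_vmx_exit_handled_io() decides whether an I/O instruction from L2 is reflected to L1: with the I/O-bitmap control clear, the unconditional-I/O-exiting control decides; with it set, ports 0x0000-0x7FFF are looked up in bitmap A and 0x8000-0xFFFF in bitmap B, one bit per port. A self-contained model of the lookup (taking the bitmaps as in-memory arrays, whereas the kernel reads them from guest memory and also walks every byte of a multi-byte access):

#include <stdint.h>
#include <stdbool.h>

/* Each bitmap is 4 KiB: A covers ports 0x0000-0x7FFF, B covers 0x8000-0xFFFF. */
static bool io_port_intercepted(const uint8_t bitmap_a[4096],
				const uint8_t bitmap_b[4096], uint16_t port)
{
	const uint8_t *bitmap = (port < 0x8000) ? bitmap_a : bitmap_b;
	uint16_t p = port & 0x7fff;

	return bitmap[p / 8] & (1u << (p % 8));
}
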
5174 struct vmcs12 *vmcs12, u32 exit_reason) in nested_vmx_exit_handled_msr() argument
5179 if (!nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) in nested_vmx_exit_handled_msr()
5187 bitmap = vmcs12->msr_bitmap; in nested_vmx_exit_handled_msr()
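
nested_vmx_exit_handled_msr() consults vmcs12->msr_bitmap, a 4 KiB page split into four 1 KiB regions: read-low, read-high, write-low, write-high, where "low" covers MSRs 0x00000000-0x00001FFF and "high" covers 0xC0000000-0xC0001FFF; MSRs outside both ranges always exit. A model of the offset calculation (bitmap taken as an in-memory array for illustration; region offsets per the Intel SDM):

#include <stdint.h>
#include <stdbool.h>

#define MSR_BITMAP_READ_LOW	0x000
#define MSR_BITMAP_READ_HIGH	0x400
#define MSR_BITMAP_WRITE_LOW	0x800
#define MSR_BITMAP_WRITE_HIGH	0xc00

static bool msr_access_intercepted(const uint8_t bitmap[4096],
				   uint32_t msr, bool is_write)
{
	uint32_t base;

	if (msr <= 0x1fff) {
		base = is_write ? MSR_BITMAP_WRITE_LOW : MSR_BITMAP_READ_LOW;
	} else if (msr >= 0xc0000000 && msr <= 0xc0001fff) {
		base = is_write ? MSR_BITMAP_WRITE_HIGH : MSR_BITMAP_READ_HIGH;
		msr &= 0x1fff;
	} else {
		return true;	/* MSRs outside both ranges always cause an exit */
	}

	return bitmap[base + msr / 8] & (1u << (msr % 8));
}
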
5211 struct vmcs12 *vmcs12) in nested_vmx_exit_handled_cr() argument
5224 if (vmcs12->cr0_guest_host_mask & in nested_vmx_exit_handled_cr()
5225 (val ^ vmcs12->cr0_read_shadow)) in nested_vmx_exit_handled_cr()
5229 if ((vmcs12->cr3_target_count >= 1 && in nested_vmx_exit_handled_cr()
5230 vmcs12->cr3_target_value0 == val) || in nested_vmx_exit_handled_cr()
5231 (vmcs12->cr3_target_count >= 2 && in nested_vmx_exit_handled_cr()
5232 vmcs12->cr3_target_value1 == val) || in nested_vmx_exit_handled_cr()
5233 (vmcs12->cr3_target_count >= 3 && in nested_vmx_exit_handled_cr()
5234 vmcs12->cr3_target_value2 == val) || in nested_vmx_exit_handled_cr()
5235 (vmcs12->cr3_target_count >= 4 && in nested_vmx_exit_handled_cr()
5236 vmcs12->cr3_target_value3 == val)) in nested_vmx_exit_handled_cr()
5238 if (nested_cpu_has(vmcs12, CPU_BASED_CR3_LOAD_EXITING)) in nested_vmx_exit_handled_cr()
5242 if (vmcs12->cr4_guest_host_mask & in nested_vmx_exit_handled_cr()
5243 (vmcs12->cr4_read_shadow ^ val)) in nested_vmx_exit_handled_cr()
5247 if (nested_cpu_has(vmcs12, CPU_BASED_CR8_LOAD_EXITING)) in nested_vmx_exit_handled_cr()
5253 if ((vmcs12->cr0_guest_host_mask & X86_CR0_TS) && in nested_vmx_exit_handled_cr()
5254 (vmcs12->cr0_read_shadow & X86_CR0_TS)) in nested_vmx_exit_handled_cr()
5260 if (vmcs12->cpu_based_vm_exec_control & in nested_vmx_exit_handled_cr()
5265 if (vmcs12->cpu_based_vm_exec_control & in nested_vmx_exit_handled_cr()
5277 if (vmcs12->cr0_guest_host_mask & 0xe & in nested_vmx_exit_handled_cr()
5278 (val ^ vmcs12->cr0_read_shadow)) in nested_vmx_exit_handled_cr()
5280 if ((vmcs12->cr0_guest_host_mask & 0x1) && in nested_vmx_exit_handled_cr()
5281 !(vmcs12->cr0_read_shadow & 0x1) && in nested_vmx_exit_handled_cr()
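
The nested_vmx_exit_handled_cr() references encode the CR-access reflection rules: a MOV to CR0/CR4 is reflected only when the new value differs from the read shadow in a bit that L1 owns through the guest/host mask; a MOV to CR3 is not reflected when the value matches one of up to four CR3-target values; CLTS and LMSW get mask-restricted variants of the CR0 test. Two small models of those checks (field names follow the listing; the functions themselves are illustrative):

#include <stdint.h>
#include <stdbool.h>

/* A CR0/CR4 write exits to L1 only if it flips a bit L1 owns via the mask. */
static bool cr_write_exits_to_l1(uint64_t guest_host_mask,
				 uint64_t read_shadow, uint64_t new_val)
{
	return (guest_host_mask & (new_val ^ read_shadow)) != 0;
}

/* A CR3 write does not exit if it hits one of L1's CR3-target values. */
static bool cr3_write_exits_to_l1(const uint64_t targets[4],
				  uint32_t target_count, uint64_t new_val,
				  bool cr3_load_exiting)
{
	for (uint32_t i = 0; i < target_count && i < 4; i++)
		if (targets[i] == new_val)
			return false;
	return cr3_load_exiting;
}
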
5290 struct vmcs12 *vmcs12, gpa_t bitmap) in nested_vmx_exit_handled_vmcs_access() argument
5296 if (!nested_cpu_has_shadow_vmcs(vmcs12)) in nested_vmx_exit_handled_vmcs_access()
5322 struct vmcs12 *vmcs12 = get_vmcs12(vcpu); in nested_vmx_exit_reflected() local
5367 return vmcs12->exception_bitmap & in nested_vmx_exit_reflected()
5374 return nested_cpu_has(vmcs12, CPU_BASED_VIRTUAL_INTR_PENDING); in nested_vmx_exit_reflected()
5376 return nested_cpu_has(vmcs12, CPU_BASED_VIRTUAL_NMI_PENDING); in nested_vmx_exit_reflected()
5382 return nested_cpu_has(vmcs12, CPU_BASED_HLT_EXITING); in nested_vmx_exit_reflected()
5386 return nested_cpu_has(vmcs12, CPU_BASED_INVLPG_EXITING); in nested_vmx_exit_reflected()
5388 return nested_cpu_has(vmcs12, CPU_BASED_RDPMC_EXITING); in nested_vmx_exit_reflected()
5390 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_RDRAND_EXITING); in nested_vmx_exit_reflected()
5392 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_RDSEED_EXITING); in nested_vmx_exit_reflected()
5394 return nested_cpu_has(vmcs12, CPU_BASED_RDTSC_EXITING); in nested_vmx_exit_reflected()
5396 return nested_vmx_exit_handled_vmcs_access(vcpu, vmcs12, in nested_vmx_exit_reflected()
5397 vmcs12->vmread_bitmap); in nested_vmx_exit_reflected()
5399 return nested_vmx_exit_handled_vmcs_access(vcpu, vmcs12, in nested_vmx_exit_reflected()
5400 vmcs12->vmwrite_bitmap); in nested_vmx_exit_reflected()
5412 return nested_vmx_exit_handled_cr(vcpu, vmcs12); in nested_vmx_exit_reflected()
5414 return nested_cpu_has(vmcs12, CPU_BASED_MOV_DR_EXITING); in nested_vmx_exit_reflected()
5416 return nested_vmx_exit_handled_io(vcpu, vmcs12); in nested_vmx_exit_reflected()
5418 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_DESC); in nested_vmx_exit_reflected()
5421 return nested_vmx_exit_handled_msr(vcpu, vmcs12, exit_reason); in nested_vmx_exit_reflected()
5425 return nested_cpu_has(vmcs12, CPU_BASED_MWAIT_EXITING); in nested_vmx_exit_reflected()
5427 return nested_cpu_has(vmcs12, CPU_BASED_MONITOR_TRAP_FLAG); in nested_vmx_exit_reflected()
5429 return nested_cpu_has(vmcs12, CPU_BASED_MONITOR_EXITING); in nested_vmx_exit_reflected()
5431 return nested_cpu_has(vmcs12, CPU_BASED_PAUSE_EXITING) || in nested_vmx_exit_reflected()
5432 nested_cpu_has2(vmcs12, in nested_vmx_exit_reflected()
5437 return nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW); in nested_vmx_exit_reflected()
5465 nested_cpu_has2(vmcs12, SECONDARY_EXEC_ENABLE_INVPCID) && in nested_vmx_exit_reflected()
5466 nested_cpu_has(vmcs12, CPU_BASED_INVLPG_EXITING); in nested_vmx_exit_reflected()
5468 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_WBINVD_EXITING); in nested_vmx_exit_reflected()
5478 return nested_cpu_has2(vmcs12, SECONDARY_EXEC_XSAVES); in nested_vmx_exit_reflected()
5492 return nested_cpu_has2(vmcs12, in nested_vmx_exit_reflected()
5505 struct vmcs12 *vmcs12; in vmx_get_nested_state() local
5520 vmcs12 = get_vmcs12(vcpu); in vmx_get_nested_state()
5528 kvm_state.size += sizeof(user_vmx_nested_state->vmcs12); in vmx_get_nested_state()
5534 nested_cpu_has_shadow_vmcs(vmcs12) && in vmx_get_nested_state()
5535 vmcs12->vmcs_link_pointer != -1ull) in vmx_get_nested_state()
5570 sync_vmcs02_to_vmcs12(vcpu, vmcs12); in vmx_get_nested_state()
5571 sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); in vmx_get_nested_state()
5579 BUILD_BUG_ON(sizeof(user_vmx_nested_state->vmcs12) < VMCS12_SIZE); in vmx_get_nested_state()
5586 if (copy_to_user(user_vmx_nested_state->vmcs12, vmcs12, VMCS12_SIZE)) in vmx_get_nested_state()
5589 if (nested_cpu_has_shadow_vmcs(vmcs12) && in vmx_get_nested_state()
5590 vmcs12->vmcs_link_pointer != -1ull) { in vmx_get_nested_state()
5617 struct vmcs12 *vmcs12; in vmx_set_nested_state() local
5690 if (kvm_state->size < sizeof(*kvm_state) + sizeof(*vmcs12)) in vmx_set_nested_state()
5717 vmcs12 = get_vmcs12(vcpu); in vmx_set_nested_state()
5718 if (copy_from_user(vmcs12, user_vmx_nested_state->vmcs12, sizeof(*vmcs12))) in vmx_set_nested_state()
5721 if (vmcs12->hdr.revision_id != VMCS12_REVISION) in vmx_set_nested_state()
5731 if (nested_cpu_has_shadow_vmcs(vmcs12) && in vmx_set_nested_state()
5732 vmcs12->vmcs_link_pointer != -1ull) { in vmx_set_nested_state()
5733 struct vmcs12 *shadow_vmcs12 = get_shadow_vmcs12(vcpu); in vmx_set_nested_state()
5737 sizeof(user_vmx_nested_state->vmcs12) + sizeof(*shadow_vmcs12)) in vmx_set_nested_state()
5752 if (nested_vmx_check_controls(vcpu, vmcs12) || in vmx_set_nested_state()
5753 nested_vmx_check_host_state(vcpu, vmcs12) || in vmx_set_nested_state()
5754 nested_vmx_check_guest_state(vcpu, vmcs12, &exit_qual)) in vmx_set_nested_state()
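
vmx_set_nested_state() validates the userspace blob before committing to it: the declared size must cover the header plus a full vmcs12, the copied-in vmcs12 must carry the expected revision id, a shadow vmcs12 is accepted only if the size covers it too, and the same controls/host-state/guest-state checks used on a real nested VMLAUNCH are re-run. A rough model of the size and revision validation, with hypothetical sizes and a placeholder revision value (not the kernel's constants):

#include <stdint.h>
#include <stddef.h>
#include <stdbool.h>

/* Hypothetical stand-ins for the real header and sizes (illustration only). */
struct state_header { uint64_t flags; uint32_t format; uint32_t size; };
#define VMCS12_BLOB_SIZE	4096u
#define EXPECTED_REVISION_ID	0xabcd0001u	/* placeholder, not the kernel's value */

static bool nested_state_blob_ok(const struct state_header *hdr,
				 uint32_t vmcs12_revision, bool has_shadow)
{
	size_t need = sizeof(*hdr) + VMCS12_BLOB_SIZE;

	if (has_shadow)
		need += VMCS12_BLOB_SIZE;	/* shadow vmcs12 follows the vmcs12 */
	if (hdr->size < need)
		return false;
	return vmcs12_revision == EXPECTED_REVISION_ID;
}
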