Lines matching full:save — identifier hits for "save" (the VMCB state-save area member and local save-area pointers) in KVM's nested SVM code; each line shows its source line number and the enclosing function.
107 kvm_init_shadow_npt_mmu(vcpu, X86_CR0_PG, svm->vmcb01.ptr->save.cr4, in nested_svm_init_mmu_context()
108 svm->vmcb01.ptr->save.efer, in nested_svm_init_mmu_context()
264 struct vmcb_save_area *save) in nested_vmcb_check_cr3_cr4() argument
271 if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) { in nested_vmcb_check_cr3_cr4()
272 if (CC(!(save->cr4 & X86_CR4_PAE)) || in nested_vmcb_check_cr3_cr4()
273 CC(!(save->cr0 & X86_CR0_PE)) || in nested_vmcb_check_cr3_cr4()
274 CC(kvm_vcpu_is_illegal_gpa(vcpu, save->cr3))) in nested_vmcb_check_cr3_cr4()
278 if (CC(!kvm_is_valid_cr4(vcpu, save->cr4))) in nested_vmcb_check_cr3_cr4()
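The six hits above are the whole of the CR3/CR4 consistency check applied to a vmcb12 save area: long-mode paging (EFER.LME with CR0.PG) additionally requires CR4.PAE, CR0.PE and a CR3 that is a legal guest-physical address, and CR4 must pass kvm_is_valid_cr4() regardless. Pulled together into readable form below; the braces and return paths between the hits are reconstructed from context, not verbatim source. CC() is KVM's consistency-check wrapper, which records which condition failed before the VMRUN is refused.

    static bool nested_vmcb_check_cr3_cr4(struct kvm_vcpu *vcpu,
                                          struct vmcb_save_area *save)
    {
            /* Long-mode paging demands PAE, protected mode and a legal CR3. */
            if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) {
                    if (CC(!(save->cr4 & X86_CR4_PAE)) ||
                        CC(!(save->cr0 & X86_CR0_PE)) ||
                        CC(kvm_vcpu_is_illegal_gpa(vcpu, save->cr3)))
                            return false;
            }

            /* CR4 must be valid for this vCPU even outside long mode. */
            if (CC(!kvm_is_valid_cr4(vcpu, save->cr4)))
                    return false;

            return true;
    }
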
286 struct vmcb_save_area *save) in nested_vmcb_valid_sregs() argument
290 * to avoid TOC/TOU races. For these save area checks in nested_vmcb_valid_sregs()
295 if (CC(!(save->efer & EFER_SVME))) in nested_vmcb_valid_sregs()
298 if (CC((save->cr0 & X86_CR0_CD) == 0 && (save->cr0 & X86_CR0_NW)) || in nested_vmcb_valid_sregs()
299 CC(save->cr0 & ~0xffffffffULL)) in nested_vmcb_valid_sregs()
302 if (CC(!kvm_dr6_valid(save->dr6)) || CC(!kvm_dr7_valid(save->dr7))) in nested_vmcb_valid_sregs()
305 if (!nested_vmcb_check_cr3_cr4(vcpu, save)) in nested_vmcb_valid_sregs()
308 if (CC(!kvm_valid_efer(vcpu, save->efer))) in nested_vmcb_valid_sregs()
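These hits are the remaining save-area sanity checks: EFER.SVME must be set in the vmcb12 image, CR0 may not combine NW=1 with CD=0 and may not set bits above 31, DR6/DR7 must hold valid values, and the CR3/CR4 and EFER checks must pass. The FIXME fragment among the hits concerns TOC/TOU races against the guest-writable vmcb12. A condensed reconstruction; as above, the control flow between hits is filled in from context:

    static bool nested_vmcb_valid_sregs(struct kvm_vcpu *vcpu,
                                        struct vmcb_save_area *save)
    {
            if (CC(!(save->efer & EFER_SVME)))
                    return false;

            /* CR0.NW without CR0.CD is illegal, as are CR0 bits above 31. */
            if (CC((save->cr0 & X86_CR0_CD) == 0 && (save->cr0 & X86_CR0_NW)) ||
                CC(save->cr0 & ~0xffffffffULL))
                    return false;

            if (CC(!kvm_dr6_valid(save->dr6)) || CC(!kvm_dr7_valid(save->dr7)))
                    return false;

            if (!nested_vmcb_check_cr3_cr4(vcpu, save))
                    return false;

            if (CC(!kvm_valid_efer(vcpu, save->efer)))
                    return false;

            return true;
    }
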
446 svm->nested.vmcb02.ptr->save.g_pat = svm->vmcb01.ptr->save.g_pat; in nested_vmcb02_compute_g_pat()
462 svm->vmcb->save.es = vmcb12->save.es; in nested_vmcb02_prepare_save()
463 svm->vmcb->save.cs = vmcb12->save.cs; in nested_vmcb02_prepare_save()
464 svm->vmcb->save.ss = vmcb12->save.ss; in nested_vmcb02_prepare_save()
465 svm->vmcb->save.ds = vmcb12->save.ds; in nested_vmcb02_prepare_save()
466 svm->vmcb->save.cpl = vmcb12->save.cpl; in nested_vmcb02_prepare_save()
471 svm->vmcb->save.gdtr = vmcb12->save.gdtr; in nested_vmcb02_prepare_save()
472 svm->vmcb->save.idtr = vmcb12->save.idtr; in nested_vmcb02_prepare_save()
476 kvm_set_rflags(&svm->vcpu, vmcb12->save.rflags | X86_EFLAGS_FIXED); in nested_vmcb02_prepare_save()
483 svm_set_efer(&svm->vcpu, vmcb12->save.efer | EFER_SVME); in nested_vmcb02_prepare_save()
485 svm_set_cr0(&svm->vcpu, vmcb12->save.cr0); in nested_vmcb02_prepare_save()
486 svm_set_cr4(&svm->vcpu, vmcb12->save.cr4); in nested_vmcb02_prepare_save()
488 svm->vcpu.arch.cr2 = vmcb12->save.cr2; in nested_vmcb02_prepare_save()
490 kvm_rax_write(&svm->vcpu, vmcb12->save.rax); in nested_vmcb02_prepare_save()
491 kvm_rsp_write(&svm->vcpu, vmcb12->save.rsp); in nested_vmcb02_prepare_save()
492 kvm_rip_write(&svm->vcpu, vmcb12->save.rip); in nested_vmcb02_prepare_save()
495 svm->vmcb->save.rax = vmcb12->save.rax; in nested_vmcb02_prepare_save()
496 svm->vmcb->save.rsp = vmcb12->save.rsp; in nested_vmcb02_prepare_save()
497 svm->vmcb->save.rip = vmcb12->save.rip; in nested_vmcb02_prepare_save()
501 svm->vmcb->save.dr7 = vmcb12->save.dr7 | DR7_FIXED_1; in nested_vmcb02_prepare_save()
502 svm->vcpu.arch.dr6 = vmcb12->save.dr6 | DR6_ACTIVE_LOW; in nested_vmcb02_prepare_save()
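A pattern worth noting in the nested_vmcb02_prepare_save() hits: segment registers, descriptor tables, CR2 and DR7 are copied straight into vmcb02, while state that KVM caches or that has side effects (RFLAGS, EFER, CR0, CR4, RAX/RSP/RIP, DR6) goes through the kvm_*/svm_* setters, with architecturally fixed bits forced on the way in (X86_EFLAGS_FIXED, EFER_SVME, DR7_FIXED_1, DR6_ACTIVE_LOW). A minimal illustration of the dual write used for the instruction pointer; the comments are editorial, not from the source:

    /* Refresh KVM's cached RIP so in-kernel emulation sees the L2 value... */
    kvm_rip_write(&svm->vcpu, vmcb12->save.rip);
    /* ...and mirror it into vmcb02, which the CPU loads on the next VMRUN. */
    svm->vmcb->save.rip = vmcb12->save.rip;
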
577 to_vmcb->save.spec_ctrl = from_vmcb->save.spec_ctrl; in nested_svm_copy_common_state()
586 trace_kvm_nested_vmrun(svm->vmcb->save.rip, vmcb12_gpa, in enter_svm_guest_mode()
587 vmcb12->save.rip, in enter_svm_guest_mode()
610 ret = nested_svm_load_cr3(&svm->vcpu, vmcb12->save.cr3, in enter_svm_guest_mode()
644 vmcb12_gpa = svm->vmcb->save.rax; in nested_svm_vmrun()
662 if (!nested_vmcb_valid_sregs(vcpu, &vmcb12->save) || in nested_svm_vmrun()
675 svm->vmcb01.ptr->save.efer = vcpu->arch.efer; in nested_svm_vmrun()
676 svm->vmcb01.ptr->save.cr0 = kvm_read_cr0(vcpu); in nested_svm_vmrun()
677 svm->vmcb01.ptr->save.cr4 = vcpu->arch.cr4; in nested_svm_vmrun()
678 svm->vmcb01.ptr->save.rflags = kvm_get_rflags(vcpu); in nested_svm_vmrun()
679 svm->vmcb01.ptr->save.rip = kvm_rip_read(vcpu); in nested_svm_vmrun()
682 svm->vmcb01.ptr->save.cr3 = kvm_read_cr3(vcpu); in nested_svm_vmrun()
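Within nested_svm_vmrun(), the hits show VMRUN emulation taking the vmcb12 address from RAX, validating its save area, and then stashing L1's current register state in vmcb01 so nested_svm_vmexit() can restore it later. A sketch of that snapshot step; the !npt_enabled guard around the CR3 line is reconstructed from context (with NPT, vmcb01 already tracks L1's CR3 directly):

    /* Cache L1 state in vmcb01 before it is replaced by the L2 state. */
    svm->vmcb01.ptr->save.efer   = vcpu->arch.efer;
    svm->vmcb01.ptr->save.cr0    = kvm_read_cr0(vcpu);
    svm->vmcb01.ptr->save.cr4    = vcpu->arch.cr4;
    svm->vmcb01.ptr->save.rflags = kvm_get_rflags(vcpu);
    svm->vmcb01.ptr->save.rip    = kvm_rip_read(vcpu);

    /* Under shadow paging, vmcb01 holds the shadow root, so save the real CR3. */
    if (!npt_enabled)
            svm->vmcb01.ptr->save.cr3 = kvm_read_cr3(vcpu);
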
708 /* Copy state save area fields which are handled by VMRUN */
731 to_vmcb->save.fs = from_vmcb->save.fs; in svm_copy_vmloadsave_state()
732 to_vmcb->save.gs = from_vmcb->save.gs; in svm_copy_vmloadsave_state()
733 to_vmcb->save.tr = from_vmcb->save.tr; in svm_copy_vmloadsave_state()
734 to_vmcb->save.ldtr = from_vmcb->save.ldtr; in svm_copy_vmloadsave_state()
735 to_vmcb->save.kernel_gs_base = from_vmcb->save.kernel_gs_base; in svm_copy_vmloadsave_state()
736 to_vmcb->save.star = from_vmcb->save.star; in svm_copy_vmloadsave_state()
737 to_vmcb->save.lstar = from_vmcb->save.lstar; in svm_copy_vmloadsave_state()
738 to_vmcb->save.cstar = from_vmcb->save.cstar; in svm_copy_vmloadsave_state()
739 to_vmcb->save.sfmask = from_vmcb->save.sfmask; in svm_copy_vmloadsave_state()
740 to_vmcb->save.sysenter_cs = from_vmcb->save.sysenter_cs; in svm_copy_vmloadsave_state()
741 to_vmcb->save.sysenter_esp = from_vmcb->save.sysenter_esp; in svm_copy_vmloadsave_state()
742 to_vmcb->save.sysenter_eip = from_vmcb->save.sysenter_eip; in svm_copy_vmloadsave_state()
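The twelve hits above are the complete body of svm_copy_vmloadsave_state(): exactly the segment and MSR state that the VMLOAD/VMSAVE instructions transfer, shuttled here between two in-memory VMCBs. Consolidated for readability; the function signature is reconstructed and the grouping comments are editorial:

    void svm_copy_vmloadsave_state(struct vmcb *to_vmcb, struct vmcb *from_vmcb)
    {
            /* Segment state touched by VMLOAD/VMSAVE. */
            to_vmcb->save.fs   = from_vmcb->save.fs;
            to_vmcb->save.gs   = from_vmcb->save.gs;
            to_vmcb->save.tr   = from_vmcb->save.tr;
            to_vmcb->save.ldtr = from_vmcb->save.ldtr;

            /* Fast-syscall and SYSENTER MSR state. */
            to_vmcb->save.kernel_gs_base = from_vmcb->save.kernel_gs_base;
            to_vmcb->save.star           = from_vmcb->save.star;
            to_vmcb->save.lstar          = from_vmcb->save.lstar;
            to_vmcb->save.cstar          = from_vmcb->save.cstar;
            to_vmcb->save.sfmask         = from_vmcb->save.sfmask;
            to_vmcb->save.sysenter_cs    = from_vmcb->save.sysenter_cs;
            to_vmcb->save.sysenter_esp   = from_vmcb->save.sysenter_esp;
            to_vmcb->save.sysenter_eip   = from_vmcb->save.sysenter_eip;
    }
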
777 vmcb12->save.es = vmcb->save.es; in nested_svm_vmexit()
778 vmcb12->save.cs = vmcb->save.cs; in nested_svm_vmexit()
779 vmcb12->save.ss = vmcb->save.ss; in nested_svm_vmexit()
780 vmcb12->save.ds = vmcb->save.ds; in nested_svm_vmexit()
781 vmcb12->save.gdtr = vmcb->save.gdtr; in nested_svm_vmexit()
782 vmcb12->save.idtr = vmcb->save.idtr; in nested_svm_vmexit()
783 vmcb12->save.efer = svm->vcpu.arch.efer; in nested_svm_vmexit()
784 vmcb12->save.cr0 = kvm_read_cr0(vcpu); in nested_svm_vmexit()
785 vmcb12->save.cr3 = kvm_read_cr3(vcpu); in nested_svm_vmexit()
786 vmcb12->save.cr2 = vmcb->save.cr2; in nested_svm_vmexit()
787 vmcb12->save.cr4 = svm->vcpu.arch.cr4; in nested_svm_vmexit()
788 vmcb12->save.rflags = kvm_get_rflags(vcpu); in nested_svm_vmexit()
789 vmcb12->save.rip = kvm_rip_read(vcpu); in nested_svm_vmexit()
790 vmcb12->save.rsp = kvm_rsp_read(vcpu); in nested_svm_vmexit()
791 vmcb12->save.rax = kvm_rax_read(vcpu); in nested_svm_vmexit()
792 vmcb12->save.dr7 = vmcb->save.dr7; in nested_svm_vmexit()
793 vmcb12->save.dr6 = svm->vcpu.arch.dr6; in nested_svm_vmexit()
794 vmcb12->save.cpl = vmcb->save.cpl; in nested_svm_vmexit()
840 kvm_set_rflags(vcpu, svm->vmcb->save.rflags); in nested_svm_vmexit()
841 svm_set_efer(vcpu, svm->vmcb->save.efer); in nested_svm_vmexit()
842 svm_set_cr0(vcpu, svm->vmcb->save.cr0 | X86_CR0_PE); in nested_svm_vmexit()
843 svm_set_cr4(vcpu, svm->vmcb->save.cr4); in nested_svm_vmexit()
844 kvm_rax_write(vcpu, svm->vmcb->save.rax); in nested_svm_vmexit()
845 kvm_rsp_write(vcpu, svm->vmcb->save.rsp); in nested_svm_vmexit()
846 kvm_rip_write(vcpu, svm->vmcb->save.rip); in nested_svm_vmexit()
864 rc = nested_svm_load_cr3(vcpu, svm->vmcb->save.cr3, false, true); in nested_svm_vmexit()
882 if (unlikely(svm->vmcb->save.rflags & X86_EFLAGS_TF)) in nested_svm_vmexit()
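The nested_svm_vmexit() hits fall into two halves: the first run writes the current L2 state back into vmcb12 (segments, descriptor tables, control registers, RFLAGS, RIP/RSP/RAX, DR6/DR7, CPL), and the later run restores L1 from the state cached in vmcb01, reloads L1's CR3, and honours a pending single-step from the original VMRUN. A sketch of the restore half; the surrounding code and the kvm_queue_exception() call are reconstructed from context:

    /*
     * svm->vmcb points back at vmcb01 here.  Restore the L1 state that
     * nested_svm_vmrun() cached; CR0.PE is forced set because #VMEXIT
     * always resumes L1 in protected mode.
     */
    kvm_set_rflags(vcpu, svm->vmcb->save.rflags);
    svm_set_efer(vcpu, svm->vmcb->save.efer);
    svm_set_cr0(vcpu, svm->vmcb->save.cr0 | X86_CR0_PE);
    svm_set_cr4(vcpu, svm->vmcb->save.cr4);
    kvm_rax_write(vcpu, svm->vmcb->save.rax);
    kvm_rsp_write(vcpu, svm->vmcb->save.rsp);
    kvm_rip_write(vcpu, svm->vmcb->save.rip);

    /* Switch back to L1's page tables. */
    rc = nested_svm_load_cr3(vcpu, svm->vmcb->save.cr3, false, true);

    /* If the VMRUN entering L2 was single-stepped, deliver the #DB to L1 now. */
    if (unlikely(svm->vmcb->save.rflags & X86_EFLAGS_TF))
            kvm_queue_exception(vcpu, DB_VECTOR);
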
1077 if (to_svm(vcpu)->vmcb->save.cpl) { in nested_svm_check_permissions()
1186 trace_kvm_nested_intr_vmexit(svm->vmcb->save.rip); in svm_check_nested_events()
1271 if (copy_to_user(&user_vmcb->save, &svm->vmcb01.ptr->save, in svm_get_nested_state()
1272 sizeof(user_vmcb->save))) in svm_get_nested_state()
1286 struct vmcb_save_area *save; in svm_set_nested_state() local
1328 save = kzalloc(sizeof(*save), GFP_KERNEL_ACCOUNT); in svm_set_nested_state()
1329 if (!ctl || !save) in svm_set_nested_state()
1335 if (copy_from_user(save, &user_vmcb->save, sizeof(*save))) in svm_set_nested_state()
1354 if (!(save->cr0 & X86_CR0_PG) || in svm_set_nested_state()
1355 !(save->cr0 & X86_CR0_PE) || in svm_set_nested_state()
1356 (save->rflags & X86_EFLAGS_VM) || in svm_set_nested_state()
1357 !nested_vmcb_valid_sregs(vcpu, save)) in svm_set_nested_state()
1376 * vmcb02, and the L1 save state which we store in vmcb01. in svm_set_nested_state()
1383 svm->nested.vmcb02.ptr->save = svm->vmcb01.ptr->save; in svm_set_nested_state()
1392 svm_copy_vmrun_state(&svm->vmcb01.ptr->save, save); in svm_set_nested_state()
1400 kfree(save); in svm_set_nested_state()
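For KVM_GET_NESTED_STATE, the L1 save area kept in vmcb01 is copied out to userspace. KVM_SET_NESTED_STATE works on heap copies of the userspace control and save areas; because the supplied L1 state is installed directly into vmcb01 rather than entered via VMRUN, it must additionally describe paged protected mode with RFLAGS.VM clear before the usual save-area checks run, after which vmcb02 is seeded from vmcb01 and the user-supplied state copied in via svm_copy_vmrun_state(). A sketch of the validation step; the out_free error path and its -EINVAL result are reconstructed, not verbatim:

    /*
     * The restored L1 state bypasses VMRUN emulation, so require sane
     * paged protected mode on top of the usual vmcb save-area checks.
     */
    if (!(save->cr0 & X86_CR0_PG) ||
        !(save->cr0 & X86_CR0_PE) ||
        (save->rflags & X86_EFLAGS_VM) ||
        !nested_vmcb_valid_sregs(vcpu, save))
            goto out_free;          /* fails the ioctl with -EINVAL */
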