Lines matching the full identifier "save" in arch/x86/kvm/svm/nested.c (KVM nested SVM):
97 kvm_init_shadow_npt_mmu(vcpu, X86_CR0_PG, svm->vmcb01.ptr->save.cr4, in nested_svm_init_mmu_context()
98 svm->vmcb01.ptr->save.efer, in nested_svm_init_mmu_context()
156 if (!(svm->vmcb01.ptr->save.rflags & X86_EFLAGS_IF)) in recalc_intercepts()
356 struct vmcb_save_area_cached *save) in __nested_vmcb_check_save() argument
358 if (CC(!(save->efer & EFER_SVME))) in __nested_vmcb_check_save()
361 if (CC((save->cr0 & X86_CR0_CD) == 0 && (save->cr0 & X86_CR0_NW)) || in __nested_vmcb_check_save()
362 CC(save->cr0 & ~0xffffffffULL)) in __nested_vmcb_check_save()
365 if (CC(!kvm_dr6_valid(save->dr6)) || CC(!kvm_dr7_valid(save->dr7))) in __nested_vmcb_check_save()
373 if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) { in __nested_vmcb_check_save()
374 if (CC(!(save->cr4 & X86_CR4_PAE)) || in __nested_vmcb_check_save()
375 CC(!(save->cr0 & X86_CR0_PE)) || in __nested_vmcb_check_save()
376 CC(!kvm_vcpu_is_legal_cr3(vcpu, save->cr3))) in __nested_vmcb_check_save()
381 if (CC(!__kvm_is_valid_cr4(vcpu, save->cr4))) in __nested_vmcb_check_save()
384 if (CC(!kvm_valid_efer(vcpu, save->efer))) in __nested_vmcb_check_save()
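The CC() checks above mirror the consistency checks hardware VMRUN performs on the VMCB save area; if any of them fails, KVM refuses the nested VMRUN with VMEXIT_INVALID. Below is a minimal standalone sketch of the CC() pattern (a GNU C statement expression, as the kernel itself uses), with a toy struct and a stderr message standing in for KVM's tracepoint; the names are illustrative, not KVM's definitions:

    /* Toy version of the CC() ("consistency check") macro: evaluate the
     * condition and report it when it fails, so a rejected VMRUN says
     * which check tripped. Illustrative only; KVM's CC() logs through a
     * tracepoint, not stderr. */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define X86_CR0_NW (1ULL << 29)
    #define X86_CR0_CD (1ULL << 30)

    #define CC(cond) ({                                                \
            bool failed_ = (cond);                                     \
            if (failed_)                                               \
                    fprintf(stderr, "check failed: %s\n", #cond);      \
            failed_;                                                   \
    })

    struct toy_save {
            uint64_t cr0;
    };

    /* Same two CR0 checks as lines 361-362 above: reject NW=1 with
     * CD=0, and reject any bits set in CR0[63:32]. */
    static bool toy_check_cr0(const struct toy_save *save)
    {
            if (CC((save->cr0 & X86_CR0_CD) == 0 && (save->cr0 & X86_CR0_NW)) ||
                CC(save->cr0 & ~0xffffffffULL))
                    return false;
            return true;
    }

The point of routing every condition through CC() is that a failed check identifies itself in the trace log, which makes debugging a guest that mysteriously gets VMEXIT_INVALID much easier.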
393 struct vmcb_save_area_cached *save = &svm->nested.save; in nested_vmcb_check_save() local
395 return __nested_vmcb_check_save(vcpu, save); in nested_vmcb_check_save()
476 struct vmcb_save_area *save) in nested_copy_vmcb_save_to_cache() argument
478 __nested_copy_vmcb_save_to_cache(&svm->nested.save, save); in nested_copy_vmcb_save_to_cache()
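vmcb12 lives in guest memory, so a hostile L1 could change fields between KVM's validity checks and their use. __nested_copy_vmcb_save_to_cache() therefore snapshots the checked fields into svm->nested.save, and everything afterwards (the checks at lines 358-384 above, the setters at lines 649-668 below) consumes only the snapshot. A toy sketch of the copy-then-check idiom follows; the names are hypothetical, but KVM's cached struct does hold efer, cr0, cr3, cr4, dr6 and dr7:

    /* Copy-then-check sketch (hypothetical names): take one snapshot of
     * the guest-writable fields, then let validation and use read only
     * the snapshot, closing the TOCTOU window. */
    #include <stdint.h>

    struct toy_save_cached {
            uint64_t efer, cr0, cr3, cr4, dr6, dr7;
    };

    static void toy_copy_save_to_cache(struct toy_save_cached *to,
                                       const volatile struct toy_save_cached *from)
    {
            /* 'from' aliases guest memory, hence volatile: one read per
             * field, and no later re-reads of anything guest-writable. */
            to->efer = from->efer;
            to->cr0  = from->cr0;
            to->cr3  = from->cr3;
            to->cr4  = from->cr4;
            to->dr6  = from->dr6;
            to->dr7  = from->dr7;
    }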
605 svm->nested.vmcb02.ptr->save.g_pat = svm->vmcb01.ptr->save.g_pat; in nested_vmcb02_compute_g_pat()
625 vmcb02->save.es = vmcb12->save.es; in nested_vmcb02_prepare_save()
626 vmcb02->save.cs = vmcb12->save.cs; in nested_vmcb02_prepare_save()
627 vmcb02->save.ss = vmcb12->save.ss; in nested_vmcb02_prepare_save()
628 vmcb02->save.ds = vmcb12->save.ds; in nested_vmcb02_prepare_save()
629 vmcb02->save.cpl = vmcb12->save.cpl; in nested_vmcb02_prepare_save()
634 vmcb02->save.gdtr = vmcb12->save.gdtr; in nested_vmcb02_prepare_save()
635 vmcb02->save.idtr = vmcb12->save.idtr; in nested_vmcb02_prepare_save()
641 vmcb02->save.s_cet = vmcb12->save.s_cet; in nested_vmcb02_prepare_save()
642 vmcb02->save.isst_addr = vmcb12->save.isst_addr; in nested_vmcb02_prepare_save()
643 vmcb02->save.ssp = vmcb12->save.ssp; in nested_vmcb02_prepare_save()
647 kvm_set_rflags(vcpu, vmcb12->save.rflags | X86_EFLAGS_FIXED); in nested_vmcb02_prepare_save()
649 svm_set_efer(vcpu, svm->nested.save.efer); in nested_vmcb02_prepare_save()
651 svm_set_cr0(vcpu, svm->nested.save.cr0); in nested_vmcb02_prepare_save()
652 svm_set_cr4(vcpu, svm->nested.save.cr4); in nested_vmcb02_prepare_save()
654 svm->vcpu.arch.cr2 = vmcb12->save.cr2; in nested_vmcb02_prepare_save()
656 kvm_rax_write(vcpu, vmcb12->save.rax); in nested_vmcb02_prepare_save()
657 kvm_rsp_write(vcpu, vmcb12->save.rsp); in nested_vmcb02_prepare_save()
658 kvm_rip_write(vcpu, vmcb12->save.rip); in nested_vmcb02_prepare_save()
661 vmcb02->save.rax = vmcb12->save.rax; in nested_vmcb02_prepare_save()
662 vmcb02->save.rsp = vmcb12->save.rsp; in nested_vmcb02_prepare_save()
663 vmcb02->save.rip = vmcb12->save.rip; in nested_vmcb02_prepare_save()
667 vmcb02->save.dr7 = svm->nested.save.dr7 | DR7_FIXED_1; in nested_vmcb02_prepare_save()
668 svm->vcpu.arch.dr6 = svm->nested.save.dr6 | DR6_ACTIVE_LOW; in nested_vmcb02_prepare_save()
679 vmcb02->save.dbgctl &= ~DEBUGCTL_RESERVED_BITS; in nested_vmcb02_prepare_save()
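Note the two paths through nested_vmcb02_prepare_save() above: segment registers, descriptor tables and the CET fields are copied straight into vmcb02, while EFER, CR0, CR4, RFLAGS and RIP/RSP/RAX go through KVM's accessors (svm_set_efer(), svm_set_cr0(), kvm_set_rflags(), kvm_rip_write(), ...) so that vcpu->arch and the state derived from it, such as paging mode and MMU role, stay coherent. A toy contrast between the two paths, with illustrative types only:

    /* Toy contrast (illustrative types): plain state can be assigned
     * directly, but state with derived side effects must go through an
     * accessor, the way svm_set_cr0() refreshes KVM's paging/MMU
     * bookkeeping. */
    #include <stdbool.h>
    #include <stdint.h>

    #define X86_CR0_PG (1ULL << 31)

    struct toy_vcpu {
            uint64_t gdtr_base;        /* no side effects: copy directly */
            uint64_t cr0;
            bool     paging_enabled;   /* derived from CR0.PG */
    };

    static void toy_set_cr0(struct toy_vcpu *vcpu, uint64_t cr0)
    {
            vcpu->cr0 = cr0;
            vcpu->paging_enabled = !!(cr0 & X86_CR0_PG);
    }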
778 if (vmcb02->save.rip && (svm->nested.ctl.bus_lock_rip == vmcb02->save.rip)) in nested_vmcb02_prepare_control()
888 to_vmcb->save.spec_ctrl = from_vmcb->save.spec_ctrl; in nested_svm_copy_common_state()
897 trace_kvm_nested_vmenter(svm->vmcb->save.rip, in enter_svm_guest_mode()
899 vmcb12->save.rip, in enter_svm_guest_mode()
904 vmcb12->save.cr3, in enter_svm_guest_mode()
922 nested_vmcb02_prepare_control(svm, vmcb12->save.rip, vmcb12->save.cs.base); in enter_svm_guest_mode()
925 ret = nested_svm_load_cr3(&svm->vcpu, svm->nested.save.cr3, in enter_svm_guest_mode()
969 vmcb12_gpa = svm->vmcb->save.rax; in nested_svm_vmrun()
986 nested_copy_vmcb_save_to_cache(svm, &vmcb12->save); in nested_svm_vmrun()
1001 vmcb01->save.efer = vcpu->arch.efer; in nested_svm_vmrun()
1002 vmcb01->save.cr0 = kvm_read_cr0(vcpu); in nested_svm_vmrun()
1003 vmcb01->save.cr4 = vcpu->arch.cr4; in nested_svm_vmrun()
1004 vmcb01->save.rflags = kvm_get_rflags(vcpu); in nested_svm_vmrun()
1005 vmcb01->save.rip = kvm_rip_read(vcpu); in nested_svm_vmrun()
1008 vmcb01->save.cr3 = kvm_read_cr3(vcpu); in nested_svm_vmrun()
1036 /* Copy state save area fields which are handled by VMRUN */
1065 to_vmcb->save.fs = from_vmcb->save.fs; in svm_copy_vmloadsave_state()
1066 to_vmcb->save.gs = from_vmcb->save.gs; in svm_copy_vmloadsave_state()
1067 to_vmcb->save.tr = from_vmcb->save.tr; in svm_copy_vmloadsave_state()
1068 to_vmcb->save.ldtr = from_vmcb->save.ldtr; in svm_copy_vmloadsave_state()
1069 to_vmcb->save.kernel_gs_base = from_vmcb->save.kernel_gs_base; in svm_copy_vmloadsave_state()
1070 to_vmcb->save.star = from_vmcb->save.star; in svm_copy_vmloadsave_state()
1071 to_vmcb->save.lstar = from_vmcb->save.lstar; in svm_copy_vmloadsave_state()
1072 to_vmcb->save.cstar = from_vmcb->save.cstar; in svm_copy_vmloadsave_state()
1073 to_vmcb->save.sfmask = from_vmcb->save.sfmask; in svm_copy_vmloadsave_state()
1074 to_vmcb->save.sysenter_cs = from_vmcb->save.sysenter_cs; in svm_copy_vmloadsave_state()
1075 to_vmcb->save.sysenter_esp = from_vmcb->save.sysenter_esp; in svm_copy_vmloadsave_state()
1076 to_vmcb->save.sysenter_eip = from_vmcb->save.sysenter_eip; in svm_copy_vmloadsave_state()
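The twelve assignments above are exactly the architectural VMLOAD/VMSAVE set: FS, GS, TR, LDTR, KERNEL_GS_BASE, STAR/LSTAR/CSTAR/SFMASK and the SYSENTER MSRs, i.e. the save-area fields that VMRUN itself does not transfer (contrast the comment at line 1036). As a hedged aside, a fixed field set like this could also be driven from an offset table; a toy sketch, not how KVM does it:

    /* Toy offset-table alternative; every field here is a plain u64.
     * KVM's explicit per-field assignments are clearer, type-safe, and
     * cope with the segment fields being multi-member records
     * (selector/attrib/base/limit) rather than u64s. */
    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    struct toy_save {
            uint64_t fs, gs, tr, ldtr, kernel_gs_base;
            uint64_t star, lstar, cstar, sfmask;
            uint64_t sysenter_cs, sysenter_esp, sysenter_eip;
    };

    static const size_t vmloadsave_fields[] = {
            offsetof(struct toy_save, fs),
            offsetof(struct toy_save, gs),
            offsetof(struct toy_save, tr),
            offsetof(struct toy_save, ldtr),
            offsetof(struct toy_save, kernel_gs_base),
            offsetof(struct toy_save, star),
            offsetof(struct toy_save, lstar),
            offsetof(struct toy_save, cstar),
            offsetof(struct toy_save, sfmask),
            offsetof(struct toy_save, sysenter_cs),
            offsetof(struct toy_save, sysenter_esp),
            offsetof(struct toy_save, sysenter_eip),
    };

    static void toy_copy_vmloadsave(struct toy_save *to, const struct toy_save *from)
    {
            for (size_t i = 0; i < sizeof(vmloadsave_fields) / sizeof(vmloadsave_fields[0]); i++)
                    memcpy((char *)to + vmloadsave_fields[i],
                           (const char *)from + vmloadsave_fields[i],
                           sizeof(uint64_t));
    }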
1109 vmcb12->save.es = vmcb02->save.es; in nested_svm_vmexit()
1110 vmcb12->save.cs = vmcb02->save.cs; in nested_svm_vmexit()
1111 vmcb12->save.ss = vmcb02->save.ss; in nested_svm_vmexit()
1112 vmcb12->save.ds = vmcb02->save.ds; in nested_svm_vmexit()
1113 vmcb12->save.gdtr = vmcb02->save.gdtr; in nested_svm_vmexit()
1114 vmcb12->save.idtr = vmcb02->save.idtr; in nested_svm_vmexit()
1115 vmcb12->save.efer = svm->vcpu.arch.efer; in nested_svm_vmexit()
1116 vmcb12->save.cr0 = kvm_read_cr0(vcpu); in nested_svm_vmexit()
1117 vmcb12->save.cr3 = kvm_read_cr3(vcpu); in nested_svm_vmexit()
1118 vmcb12->save.cr2 = vmcb02->save.cr2; in nested_svm_vmexit()
1119 vmcb12->save.cr4 = svm->vcpu.arch.cr4; in nested_svm_vmexit()
1120 vmcb12->save.rflags = kvm_get_rflags(vcpu); in nested_svm_vmexit()
1121 vmcb12->save.rip = kvm_rip_read(vcpu); in nested_svm_vmexit()
1122 vmcb12->save.rsp = kvm_rsp_read(vcpu); in nested_svm_vmexit()
1123 vmcb12->save.rax = kvm_rax_read(vcpu); in nested_svm_vmexit()
1124 vmcb12->save.dr7 = vmcb02->save.dr7; in nested_svm_vmexit()
1125 vmcb12->save.dr6 = svm->vcpu.arch.dr6; in nested_svm_vmexit()
1126 vmcb12->save.cpl = vmcb02->save.cpl; in nested_svm_vmexit()
1129 vmcb12->save.s_cet = vmcb02->save.s_cet; in nested_svm_vmexit()
1130 vmcb12->save.isst_addr = vmcb02->save.isst_addr; in nested_svm_vmexit()
1131 vmcb12->save.ssp = vmcb02->save.ssp; in nested_svm_vmexit()
1238 kvm_set_rflags(vcpu, vmcb01->save.rflags); in nested_svm_vmexit()
1239 svm_set_efer(vcpu, vmcb01->save.efer); in nested_svm_vmexit()
1240 svm_set_cr0(vcpu, vmcb01->save.cr0 | X86_CR0_PE); in nested_svm_vmexit()
1241 svm_set_cr4(vcpu, vmcb01->save.cr4); in nested_svm_vmexit()
1242 kvm_rax_write(vcpu, vmcb01->save.rax); in nested_svm_vmexit()
1243 kvm_rsp_write(vcpu, vmcb01->save.rsp); in nested_svm_vmexit()
1244 kvm_rip_write(vcpu, vmcb01->save.rip); in nested_svm_vmexit()
1262 rc = nested_svm_load_cr3(vcpu, vmcb01->save.cr3, false, true); in nested_svm_vmexit()
1280 if (unlikely(vmcb01->save.rflags & X86_EFLAGS_TF)) in nested_svm_vmexit()
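The exit path is the mirror image of nested_vmcb02_prepare_save(): L2 state is written back to vmcb12 (lines 1109-1131), then L1 state is reloaded from vmcb01 through the same accessors used on entry (lines 1238-1262). CR0.PE is forced on at line 1240 because L1 must have been in protected mode to execute VMRUN in the first place. Line 1280 handles single-stepping: if L1 ran VMRUN with RFLAGS.TF set, the #DB trap that hardware would raise when the instruction completes must be synthesized at #VMEXIT. A toy sketch of that last rule, with hypothetical names:

    /* Toy sketch of the RFLAGS.TF rule at line 1280 (hypothetical
     * names): if the VMRUN instruction itself was being single-stepped,
     * queue the #DB trap once the instruction "completes" at #VMEXIT.
     * The real code queues DB_VECTOR via kvm_queue_exception(). */
    #include <stdbool.h>
    #include <stdint.h>

    #define X86_EFLAGS_TF (1ULL << 8)

    struct toy_vcpu {
            uint64_t l1_rflags;   /* L1's RFLAGS as saved in vmcb01 */
            bool     pending_db;
    };

    static void toy_finish_nested_vmexit(struct toy_vcpu *vcpu)
    {
            if (vcpu->l1_rflags & X86_EFLAGS_TF)
                    vcpu->pending_db = true;
    }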
1490 if (to_svm(vcpu)->vmcb->save.cpl) { in nested_svm_check_permissions()
1614 trace_kvm_nested_intr_vmexit(svm->vmcb->save.rip); in svm_check_nested_events()
1764 if (copy_to_user(&user_vmcb->save, &svm->vmcb01.ptr->save, in svm_get_nested_state()
1765 sizeof(user_vmcb->save))) in svm_get_nested_state()
1779 struct vmcb_save_area *save; in svm_set_nested_state() local
1825 save = memdup_user(&user_vmcb->save, sizeof(*save)); in svm_set_nested_state()
1826 if (IS_ERR(save)) { in svm_set_nested_state()
1828 return PTR_ERR(save); in svm_set_nested_state()
1848 __nested_copy_vmcb_save_to_cache(&save_cached, save); in svm_set_nested_state()
1849 if (!(save->cr0 & X86_CR0_PG) || in svm_set_nested_state()
1850 !(save->cr0 & X86_CR0_PE) || in svm_set_nested_state()
1851 (save->rflags & X86_EFLAGS_VM) || in svm_set_nested_state()
1859 * vmcb02, and the L1 save state which we store in vmcb01. in svm_set_nested_state()
1866 svm->nested.vmcb02.ptr->save = svm->vmcb01.ptr->save; in svm_set_nested_state()
1875 svm_copy_vmrun_state(&svm->vmcb01.ptr->save, save); in svm_set_nested_state()
1879 nested_vmcb02_prepare_control(svm, svm->vmcb->save.rip, svm->vmcb->save.cs.base); in svm_set_nested_state()
1898 kfree(save); in svm_set_nested_state()
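Lines 1764-1765 and 1779-1898 are the two halves of nested-state migration: svm_get_nested_state() copies the L1 save area (vmcb01's) out to userspace, and svm_set_nested_state() re-validates the incoming state with the same __nested_vmcb_check_save() logic as a real VMRUN before rebuilding vmcb01/vmcb02. A hedged userspace sketch of driving the ioctl pair; it assumes open vCPU fds on a host advertising KVM_CAP_NESTED_STATE, with buffer handling per Documentation/virt/kvm/api.rst:

    /* Hedged userspace sketch: moving nested state between vCPUs with
     * the KVM_GET/SET_NESTED_STATE ioctls (error handling trimmed;
     * 'vcpu_src' and 'vcpu_dst' are assumed to be open vCPU fds). */
    #include <linux/kvm.h>
    #include <string.h>
    #include <sys/ioctl.h>

    static int migrate_nested_state(int vcpu_src, int vcpu_dst)
    {
            /* Generously sized buffer; the header's 'size' field tells
             * the kernel how much room we have, and E2BIG reports the
             * required size if it is too small. */
            char buf[16384] __attribute__((aligned(16)));
            struct kvm_nested_state *state = (struct kvm_nested_state *)buf;

            memset(buf, 0, sizeof(buf));
            state->size = sizeof(buf);
            if (ioctl(vcpu_src, KVM_GET_NESTED_STATE, state) < 0)
                    return -1;

            /* state->hdr.svm.vmcb_pa plus the vmcb12 save/control image
             * in state->data are what svm_set_nested_state() validates
             * and reconstructs the nested context from. */
            return ioctl(vcpu_dst, KVM_SET_NESTED_STATE, state);
    }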