Lines Matching full:save
93 kvm_init_shadow_npt_mmu(vcpu, X86_CR0_PG, svm->vmcb01.ptr->save.cr4, in nested_svm_init_mmu_context()
94 svm->vmcb01.ptr->save.efer, in nested_svm_init_mmu_context()
152 if (!(svm->vmcb01.ptr->save.rflags & X86_EFLAGS_IF)) in recalc_intercepts()
285 struct vmcb_save_area_cached *save) in __nested_vmcb_check_save() argument
287 if (CC(!(save->efer & EFER_SVME))) in __nested_vmcb_check_save()
290 if (CC((save->cr0 & X86_CR0_CD) == 0 && (save->cr0 & X86_CR0_NW)) || in __nested_vmcb_check_save()
291 CC(save->cr0 & ~0xffffffffULL)) in __nested_vmcb_check_save()
294 if (CC(!kvm_dr6_valid(save->dr6)) || CC(!kvm_dr7_valid(save->dr7))) in __nested_vmcb_check_save()
302 if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) { in __nested_vmcb_check_save()
303 if (CC(!(save->cr4 & X86_CR4_PAE)) || in __nested_vmcb_check_save()
304 CC(!(save->cr0 & X86_CR0_PE)) || in __nested_vmcb_check_save()
305 CC(!kvm_vcpu_is_legal_cr3(vcpu, save->cr3))) in __nested_vmcb_check_save()
310 if (CC(!__kvm_is_valid_cr4(vcpu, save->cr4))) in __nested_vmcb_check_save()
313 if (CC(!kvm_valid_efer(vcpu, save->efer))) in __nested_vmcb_check_save()
322 struct vmcb_save_area_cached *save = &svm->nested.save; in nested_vmcb_check_save() local
324 return __nested_vmcb_check_save(vcpu, save); in nested_vmcb_check_save()
405 struct vmcb_save_area *save) in nested_copy_vmcb_save_to_cache() argument
407 __nested_copy_vmcb_save_to_cache(&svm->nested.save, save); in nested_copy_vmcb_save_to_cache()
534 svm->nested.vmcb02.ptr->save.g_pat = svm->vmcb01.ptr->save.g_pat; in nested_vmcb02_compute_g_pat()
554 vmcb02->save.es = vmcb12->save.es; in nested_vmcb02_prepare_save()
555 vmcb02->save.cs = vmcb12->save.cs; in nested_vmcb02_prepare_save()
556 vmcb02->save.ss = vmcb12->save.ss; in nested_vmcb02_prepare_save()
557 vmcb02->save.ds = vmcb12->save.ds; in nested_vmcb02_prepare_save()
558 vmcb02->save.cpl = vmcb12->save.cpl; in nested_vmcb02_prepare_save()
563 vmcb02->save.gdtr = vmcb12->save.gdtr; in nested_vmcb02_prepare_save()
564 vmcb02->save.idtr = vmcb12->save.idtr; in nested_vmcb02_prepare_save()
568 kvm_set_rflags(vcpu, vmcb12->save.rflags | X86_EFLAGS_FIXED); in nested_vmcb02_prepare_save()
570 svm_set_efer(vcpu, svm->nested.save.efer); in nested_vmcb02_prepare_save()
572 svm_set_cr0(vcpu, svm->nested.save.cr0); in nested_vmcb02_prepare_save()
573 svm_set_cr4(vcpu, svm->nested.save.cr4); in nested_vmcb02_prepare_save()
575 svm->vcpu.arch.cr2 = vmcb12->save.cr2; in nested_vmcb02_prepare_save()
577 kvm_rax_write(vcpu, vmcb12->save.rax); in nested_vmcb02_prepare_save()
578 kvm_rsp_write(vcpu, vmcb12->save.rsp); in nested_vmcb02_prepare_save()
579 kvm_rip_write(vcpu, vmcb12->save.rip); in nested_vmcb02_prepare_save()
582 vmcb02->save.rax = vmcb12->save.rax; in nested_vmcb02_prepare_save()
583 vmcb02->save.rsp = vmcb12->save.rsp; in nested_vmcb02_prepare_save()
584 vmcb02->save.rip = vmcb12->save.rip; in nested_vmcb02_prepare_save()
588 vmcb02->save.dr7 = svm->nested.save.dr7 | DR7_FIXED_1; in nested_vmcb02_prepare_save()
589 svm->vcpu.arch.dr6 = svm->nested.save.dr6 | DR6_ACTIVE_LOW; in nested_vmcb02_prepare_save()
600 vmcb02->save.dbgctl &= ~DEBUGCTL_RESERVED_BITS; in nested_vmcb02_prepare_save()
782 to_vmcb->save.spec_ctrl = from_vmcb->save.spec_ctrl; in nested_svm_copy_common_state()
791 trace_kvm_nested_vmenter(svm->vmcb->save.rip, in enter_svm_guest_mode()
793 vmcb12->save.rip, in enter_svm_guest_mode()
798 vmcb12->save.cr3, in enter_svm_guest_mode()
816 nested_vmcb02_prepare_control(svm, vmcb12->save.rip, vmcb12->save.cs.base); in enter_svm_guest_mode()
819 ret = nested_svm_load_cr3(&svm->vcpu, svm->nested.save.cr3, in enter_svm_guest_mode()
863 vmcb12_gpa = svm->vmcb->save.rax; in nested_svm_vmrun()
880 nested_copy_vmcb_save_to_cache(svm, &vmcb12->save); in nested_svm_vmrun()
895 vmcb01->save.efer = vcpu->arch.efer; in nested_svm_vmrun()
896 vmcb01->save.cr0 = kvm_read_cr0(vcpu); in nested_svm_vmrun()
897 vmcb01->save.cr4 = vcpu->arch.cr4; in nested_svm_vmrun()
898 vmcb01->save.rflags = kvm_get_rflags(vcpu); in nested_svm_vmrun()
899 vmcb01->save.rip = kvm_rip_read(vcpu); in nested_svm_vmrun()
902 vmcb01->save.cr3 = kvm_read_cr3(vcpu); in nested_svm_vmrun()
930 /* Copy state save area fields which are handled by VMRUN */
953 to_vmcb->save.fs = from_vmcb->save.fs; in svm_copy_vmloadsave_state()
954 to_vmcb->save.gs = from_vmcb->save.gs; in svm_copy_vmloadsave_state()
955 to_vmcb->save.tr = from_vmcb->save.tr; in svm_copy_vmloadsave_state()
956 to_vmcb->save.ldtr = from_vmcb->save.ldtr; in svm_copy_vmloadsave_state()
957 to_vmcb->save.kernel_gs_base = from_vmcb->save.kernel_gs_base; in svm_copy_vmloadsave_state()
958 to_vmcb->save.star = from_vmcb->save.star; in svm_copy_vmloadsave_state()
959 to_vmcb->save.lstar = from_vmcb->save.lstar; in svm_copy_vmloadsave_state()
960 to_vmcb->save.cstar = from_vmcb->save.cstar; in svm_copy_vmloadsave_state()
961 to_vmcb->save.sfmask = from_vmcb->save.sfmask; in svm_copy_vmloadsave_state()
962 to_vmcb->save.sysenter_cs = from_vmcb->save.sysenter_cs; in svm_copy_vmloadsave_state()
963 to_vmcb->save.sysenter_esp = from_vmcb->save.sysenter_esp; in svm_copy_vmloadsave_state()
964 to_vmcb->save.sysenter_eip = from_vmcb->save.sysenter_eip; in svm_copy_vmloadsave_state()
997 vmcb12->save.es = vmcb02->save.es; in nested_svm_vmexit()
998 vmcb12->save.cs = vmcb02->save.cs; in nested_svm_vmexit()
999 vmcb12->save.ss = vmcb02->save.ss; in nested_svm_vmexit()
1000 vmcb12->save.ds = vmcb02->save.ds; in nested_svm_vmexit()
1001 vmcb12->save.gdtr = vmcb02->save.gdtr; in nested_svm_vmexit()
1002 vmcb12->save.idtr = vmcb02->save.idtr; in nested_svm_vmexit()
1003 vmcb12->save.efer = svm->vcpu.arch.efer; in nested_svm_vmexit()
1004 vmcb12->save.cr0 = kvm_read_cr0(vcpu); in nested_svm_vmexit()
1005 vmcb12->save.cr3 = kvm_read_cr3(vcpu); in nested_svm_vmexit()
1006 vmcb12->save.cr2 = vmcb02->save.cr2; in nested_svm_vmexit()
1007 vmcb12->save.cr4 = svm->vcpu.arch.cr4; in nested_svm_vmexit()
1008 vmcb12->save.rflags = kvm_get_rflags(vcpu); in nested_svm_vmexit()
1009 vmcb12->save.rip = kvm_rip_read(vcpu); in nested_svm_vmexit()
1010 vmcb12->save.rsp = kvm_rsp_read(vcpu); in nested_svm_vmexit()
1011 vmcb12->save.rax = kvm_rax_read(vcpu); in nested_svm_vmexit()
1012 vmcb12->save.dr7 = vmcb02->save.dr7; in nested_svm_vmexit()
1013 vmcb12->save.dr6 = svm->vcpu.arch.dr6; in nested_svm_vmexit()
1014 vmcb12->save.cpl = vmcb02->save.cpl; in nested_svm_vmexit()
1111 kvm_set_rflags(vcpu, vmcb01->save.rflags); in nested_svm_vmexit()
1112 svm_set_efer(vcpu, vmcb01->save.efer); in nested_svm_vmexit()
1113 svm_set_cr0(vcpu, vmcb01->save.cr0 | X86_CR0_PE); in nested_svm_vmexit()
1114 svm_set_cr4(vcpu, vmcb01->save.cr4); in nested_svm_vmexit()
1115 kvm_rax_write(vcpu, vmcb01->save.rax); in nested_svm_vmexit()
1116 kvm_rsp_write(vcpu, vmcb01->save.rsp); in nested_svm_vmexit()
1117 kvm_rip_write(vcpu, vmcb01->save.rip); in nested_svm_vmexit()
1135 rc = nested_svm_load_cr3(vcpu, vmcb01->save.cr3, false, true); in nested_svm_vmexit()
1153 if (unlikely(vmcb01->save.rflags & X86_EFLAGS_TF)) in nested_svm_vmexit()
1364 if (to_svm(vcpu)->vmcb->save.cpl) { in nested_svm_check_permissions()
1488 trace_kvm_nested_intr_vmexit(svm->vmcb->save.rip); in svm_check_nested_events()
1638 if (copy_to_user(&user_vmcb->save, &svm->vmcb01.ptr->save, in svm_get_nested_state()
1639 sizeof(user_vmcb->save))) in svm_get_nested_state()
1653 struct vmcb_save_area *save; in svm_set_nested_state() local
1697 save = kzalloc(sizeof(*save), GFP_KERNEL_ACCOUNT); in svm_set_nested_state()
1698 if (!ctl || !save) in svm_set_nested_state()
1704 if (copy_from_user(save, &user_vmcb->save, sizeof(*save))) in svm_set_nested_state()
1724 __nested_copy_vmcb_save_to_cache(&save_cached, save); in svm_set_nested_state()
1725 if (!(save->cr0 & X86_CR0_PG) || in svm_set_nested_state()
1726 !(save->cr0 & X86_CR0_PE) || in svm_set_nested_state()
1727 (save->rflags & X86_EFLAGS_VM) || in svm_set_nested_state()
1735 * vmcb02, and the L1 save state which we store in vmcb01. in svm_set_nested_state()
1742 svm->nested.vmcb02.ptr->save = svm->vmcb01.ptr->save; in svm_set_nested_state()
1751 svm_copy_vmrun_state(&svm->vmcb01.ptr->save, save); in svm_set_nested_state()
1755 nested_vmcb02_prepare_control(svm, svm->vmcb->save.rip, svm->vmcb->save.cs.base); in svm_set_nested_state()
1774 kfree(save); in svm_set_nested_state()
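
Taken together, the matches above trace the life cycle of the SVM save areas. nested_svm_vmrun() caches the guest's vmcb12 save area into svm->nested.save and stashes the current L1 state (EFER, CR0, CR3, CR4, RFLAGS, RIP) into vmcb01->save. nested_vmcb02_prepare_save() then builds the vmcb02 save area from vmcb12 and the cached copy, and nested_svm_vmexit() later writes the L2 state from vmcb02 back into vmcb12 while restoring L1 from vmcb01->save. svm_copy_vmloadsave_state() moves only the fields that VMLOAD/VMSAVE handle (fs, gs, tr, ldtr, kernel_gs_base, star/lstar/cstar/sfmask and the sysenter MSRs), and svm_get_nested_state()/svm_set_nested_state() round-trip the L1 save area through user_vmcb->save.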
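The consistency checks matched in __nested_vmcb_check_save() are the densest part of the listing. Below is a minimal standalone sketch of that logic, assuming simplified stand-in types and locally defined bit masks; the kernel's CC() tracing wrapper and the vcpu-dependent checks (kvm_dr6_valid(), kvm_dr7_valid(), kvm_vcpu_is_legal_cr3(), __kvm_is_valid_cr4(), kvm_valid_efer()) are omitted, so this illustrates the architectural constraints rather than reproducing the kernel code.

/*
 * Standalone illustration only: "struct save_cached" is a stand-in for
 * the kernel's struct vmcb_save_area_cached, and the bit masks are
 * spelled out locally instead of coming from kernel headers.
 */
#include <stdbool.h>
#include <stdint.h>

#define EFER_SVME	(1ULL << 12)
#define EFER_LME	(1ULL << 8)
#define X86_CR0_PE	(1ULL << 0)
#define X86_CR0_NW	(1ULL << 29)
#define X86_CR0_CD	(1ULL << 30)
#define X86_CR0_PG	(1ULL << 31)
#define X86_CR4_PAE	(1ULL << 5)

struct save_cached {
	uint64_t efer, cr0, cr3, cr4, dr6, dr7;
};

static bool check_save(const struct save_cached *save)
{
	/* The nested guest must have EFER.SVME set. */
	if (!(save->efer & EFER_SVME))
		return false;

	/*
	 * CR0.CD == 0 with CR0.NW == 1 is an illegal combination, and
	 * bits 63:32 of CR0 must be clear.
	 */
	if (((save->cr0 & X86_CR0_CD) == 0 && (save->cr0 & X86_CR0_NW)) ||
	    (save->cr0 & ~0xffffffffULL))
		return false;

	/*
	 * Long mode (EFER.LME && CR0.PG) additionally requires CR4.PAE
	 * and CR0.PE; the kernel also validates CR3 against the vcpu,
	 * which is omitted here.
	 */
	if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) {
		if (!(save->cr4 & X86_CR4_PAE) || !(save->cr0 & X86_CR0_PE))
			return false;
	}

	return true;
}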