Lines matching full:vcpu in KVM's x86 System Management Mode (SMM) emulation code

112 void kvm_smm_changed(struct kvm_vcpu *vcpu, bool entering_smm)  in kvm_smm_changed()  argument
114 trace_kvm_smm_transition(vcpu->vcpu_id, vcpu->arch.smbase, entering_smm); in kvm_smm_changed()
117 vcpu->arch.hflags |= HF_SMM_MASK; in kvm_smm_changed()
119 vcpu->arch.hflags &= ~(HF_SMM_MASK | HF_SMM_INSIDE_NMI_MASK); in kvm_smm_changed()
122 kvm_make_request(KVM_REQ_EVENT, vcpu); in kvm_smm_changed()
129 vcpu->arch.pdptrs_from_userspace = false; in kvm_smm_changed()
132 kvm_mmu_reset_context(vcpu); in kvm_smm_changed()
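The hflags bits flipped above are the single source of truth for whether a vCPU is in System Management Mode; the rest of KVM queries them through a tiny accessor. A minimal sketch of that relationship, mirroring KVM's is_smm() helper and shown here for orientation rather than as new code:

static inline bool example_is_smm(struct kvm_vcpu *vcpu)
{
	/*
	 * SMM membership is just HF_SMM_MASK in vcpu->arch.hflags: set by
	 * kvm_smm_changed(vcpu, true) on entry, cleared together with
	 * HF_SMM_INSIDE_NMI_MASK by kvm_smm_changed(vcpu, false) on RSM.
	 */
	return vcpu->arch.hflags & HF_SMM_MASK;
}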
135 void process_smi(struct kvm_vcpu *vcpu) in process_smi() argument
137 vcpu->arch.smi_pending = true; in process_smi()
138 kvm_make_request(KVM_REQ_EVENT, vcpu); in process_smi()
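process_smi() only latches the SMI; delivery happens later on the event-injection path, which calls enter_smm() once the pending SMI may be taken. A simplified, hedged sketch of that hand-off (the real code also consults the vendor smi_allowed hook and bookkeeping omitted here):

static void example_deliver_pending_smi(struct kvm_vcpu *vcpu)
{
	/* Simplified: real delivery also checks whether SMIs are allowed. */
	if (vcpu->arch.smi_pending && !(vcpu->arch.hflags & HF_SMM_MASK)) {
		vcpu->arch.smi_pending = false;
		enter_smm(vcpu);
	}
}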
155 static void enter_smm_save_seg_32(struct kvm_vcpu *vcpu, in enter_smm_save_seg_32() argument
161 kvm_get_segment(vcpu, &seg, n); in enter_smm_save_seg_32()
169 static void enter_smm_save_seg_64(struct kvm_vcpu *vcpu, in enter_smm_save_seg_64() argument
175 kvm_get_segment(vcpu, &seg, n); in enter_smm_save_seg_64()
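Both save_seg helpers flatten the struct kvm_segment fetched with kvm_get_segment() into the packed SMRAM descriptor layout. A hedged reconstruction of the 32-bit variant's body; the field names follow KVM's kvm_smm_seg_state_32 layout as an assumption, and example_pack_segment_flags() is a hypothetical stand-in for the kernel's own attribute-packing helper:

static void example_save_seg_32(struct kvm_vcpu *vcpu,
				struct kvm_smm_seg_state_32 *state,
				u32 *selector, int n)
{
	struct kvm_segment seg;

	kvm_get_segment(vcpu, &seg, n);
	*selector    = seg.selector;
	state->base  = seg.base;
	state->limit = seg.limit;
	/* type, S, DPL, P, AVL, L, D/B and G are packed into one word */
	state->flags = example_pack_segment_flags(&seg);
}

The 64-bit variant is the same idea; as its signature above shows, the selector is stored inside the per-segment structure rather than passed out separately.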
183 static void enter_smm_save_state_32(struct kvm_vcpu *vcpu, in enter_smm_save_state_32() argument
190 smram->cr0 = kvm_read_cr0(vcpu); in enter_smm_save_state_32()
191 smram->cr3 = kvm_read_cr3(vcpu); in enter_smm_save_state_32()
192 smram->eflags = kvm_get_rflags(vcpu); in enter_smm_save_state_32()
193 smram->eip = kvm_rip_read(vcpu); in enter_smm_save_state_32()
196 smram->gprs[i] = kvm_register_read_raw(vcpu, i); in enter_smm_save_state_32()
198 kvm_get_dr(vcpu, 6, &val); in enter_smm_save_state_32()
200 kvm_get_dr(vcpu, 7, &val); in enter_smm_save_state_32()
203 enter_smm_save_seg_32(vcpu, &smram->tr, &smram->tr_sel, VCPU_SREG_TR); in enter_smm_save_state_32()
204 enter_smm_save_seg_32(vcpu, &smram->ldtr, &smram->ldtr_sel, VCPU_SREG_LDTR); in enter_smm_save_state_32()
206 static_call(kvm_x86_get_gdt)(vcpu, &dt); in enter_smm_save_state_32()
210 static_call(kvm_x86_get_idt)(vcpu, &dt); in enter_smm_save_state_32()
214 enter_smm_save_seg_32(vcpu, &smram->es, &smram->es_sel, VCPU_SREG_ES); in enter_smm_save_state_32()
215 enter_smm_save_seg_32(vcpu, &smram->cs, &smram->cs_sel, VCPU_SREG_CS); in enter_smm_save_state_32()
216 enter_smm_save_seg_32(vcpu, &smram->ss, &smram->ss_sel, VCPU_SREG_SS); in enter_smm_save_state_32()
218 enter_smm_save_seg_32(vcpu, &smram->ds, &smram->ds_sel, VCPU_SREG_DS); in enter_smm_save_state_32()
219 enter_smm_save_seg_32(vcpu, &smram->fs, &smram->fs_sel, VCPU_SREG_FS); in enter_smm_save_state_32()
220 enter_smm_save_seg_32(vcpu, &smram->gs, &smram->gs_sel, VCPU_SREG_GS); in enter_smm_save_state_32()
222 smram->cr4 = kvm_read_cr4(vcpu); in enter_smm_save_state_32()
224 smram->smbase = vcpu->arch.smbase; in enter_smm_save_state_32()
226 smram->int_shadow = static_call(kvm_x86_get_interrupt_shadow)(vcpu); in enter_smm_save_state_32()
230 static void enter_smm_save_state_64(struct kvm_vcpu *vcpu, in enter_smm_save_state_64() argument
238 smram->gprs[15 - i] = kvm_register_read_raw(vcpu, i); in enter_smm_save_state_64()
240 smram->rip = kvm_rip_read(vcpu); in enter_smm_save_state_64()
241 smram->rflags = kvm_get_rflags(vcpu); in enter_smm_save_state_64()
244 kvm_get_dr(vcpu, 6, &val); in enter_smm_save_state_64()
246 kvm_get_dr(vcpu, 7, &val); in enter_smm_save_state_64()
249 smram->cr0 = kvm_read_cr0(vcpu); in enter_smm_save_state_64()
250 smram->cr3 = kvm_read_cr3(vcpu); in enter_smm_save_state_64()
251 smram->cr4 = kvm_read_cr4(vcpu); in enter_smm_save_state_64()
253 smram->smbase = vcpu->arch.smbase; in enter_smm_save_state_64()
256 smram->efer = vcpu->arch.efer; in enter_smm_save_state_64()
258 enter_smm_save_seg_64(vcpu, &smram->tr, VCPU_SREG_TR); in enter_smm_save_state_64()
260 static_call(kvm_x86_get_idt)(vcpu, &dt); in enter_smm_save_state_64()
264 enter_smm_save_seg_64(vcpu, &smram->ldtr, VCPU_SREG_LDTR); in enter_smm_save_state_64()
266 static_call(kvm_x86_get_gdt)(vcpu, &dt); in enter_smm_save_state_64()
270 enter_smm_save_seg_64(vcpu, &smram->es, VCPU_SREG_ES); in enter_smm_save_state_64()
271 enter_smm_save_seg_64(vcpu, &smram->cs, VCPU_SREG_CS); in enter_smm_save_state_64()
272 enter_smm_save_seg_64(vcpu, &smram->ss, VCPU_SREG_SS); in enter_smm_save_state_64()
273 enter_smm_save_seg_64(vcpu, &smram->ds, VCPU_SREG_DS); in enter_smm_save_state_64()
274 enter_smm_save_seg_64(vcpu, &smram->fs, VCPU_SREG_FS); in enter_smm_save_state_64()
275 enter_smm_save_seg_64(vcpu, &smram->gs, VCPU_SREG_GS); in enter_smm_save_state_64()
277 smram->int_shadow = static_call(kvm_x86_get_interrupt_shadow)(vcpu); in enter_smm_save_state_64()
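One detail worth calling out: the gprs[15 - i] indexing above means the 64-bit SMRAM map stores the general-purpose registers from R15 down to RAX, so KVM's register index i (RAX == 0 ... R15 == 15) is mirrored on save. A hedged sketch of the corresponding restore direction, using the plain vCPU accessor for illustration (the real RSM path writes through the emulator's register cache instead):

static void example_restore_gprs_64(struct kvm_vcpu *vcpu,
				    const struct kvm_smram_state_64 *smram)
{
	int i;

	/* Undo the mirrored save order: gprs[15] holds RAX (i == 0). */
	for (i = 0; i < 16; i++)
		kvm_register_write_raw(vcpu, i, smram->gprs[15 - i]);
}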
281 void enter_smm(struct kvm_vcpu *vcpu) in enter_smm() argument
293 if (guest_cpuid_has(vcpu, X86_FEATURE_LM)) in enter_smm()
294 enter_smm_save_state_64(vcpu, &smram.smram64); in enter_smm()
297 enter_smm_save_state_32(vcpu, &smram.smram32); in enter_smm()
300 * Give enter_smm() a chance to make ISA-specific changes to the vCPU in enter_smm()
307 if (static_call(kvm_x86_enter_smm)(vcpu, &smram)) in enter_smm()
310 kvm_smm_changed(vcpu, true); in enter_smm()
312 if (kvm_vcpu_write_guest(vcpu, vcpu->arch.smbase + 0xfe00, &smram, sizeof(smram))) in enter_smm()
315 if (static_call(kvm_x86_get_nmi_mask)(vcpu)) in enter_smm()
316 vcpu->arch.hflags |= HF_SMM_INSIDE_NMI_MASK; in enter_smm()
318 static_call(kvm_x86_set_nmi_mask)(vcpu, true); in enter_smm()
320 kvm_set_rflags(vcpu, X86_EFLAGS_FIXED); in enter_smm()
321 kvm_rip_write(vcpu, 0x8000); in enter_smm()
323 static_call(kvm_x86_set_interrupt_shadow)(vcpu, 0); in enter_smm()
325 cr0 = vcpu->arch.cr0 & ~(X86_CR0_PE | X86_CR0_EM | X86_CR0_TS | X86_CR0_PG); in enter_smm()
326 static_call(kvm_x86_set_cr0)(vcpu, cr0); in enter_smm()
328 static_call(kvm_x86_set_cr4)(vcpu, 0); in enter_smm()
332 static_call(kvm_x86_set_idt)(vcpu, &dt); in enter_smm()
334 if (WARN_ON_ONCE(kvm_set_dr(vcpu, 7, DR7_FIXED_1))) in enter_smm()
337 cs.selector = (vcpu->arch.smbase >> 4) & 0xffff; in enter_smm()
338 cs.base = vcpu->arch.smbase; in enter_smm()
355 kvm_set_segment(vcpu, &cs, VCPU_SREG_CS); in enter_smm()
356 kvm_set_segment(vcpu, &ds, VCPU_SREG_DS); in enter_smm()
357 kvm_set_segment(vcpu, &ds, VCPU_SREG_ES); in enter_smm()
358 kvm_set_segment(vcpu, &ds, VCPU_SREG_FS); in enter_smm()
359 kvm_set_segment(vcpu, &ds, VCPU_SREG_GS); in enter_smm()
360 kvm_set_segment(vcpu, &ds, VCPU_SREG_SS); in enter_smm()
363 if (guest_cpuid_has(vcpu, X86_FEATURE_LM)) in enter_smm()
364 if (static_call(kvm_x86_set_efer)(vcpu, 0)) in enter_smm()
368 kvm_update_cpuid_runtime(vcpu); in enter_smm()
369 kvm_mmu_reset_context(vcpu); in enter_smm()
372 kvm_vm_dead(vcpu->kvm); in enter_smm()
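Taken together, enter_smm() writes the saved-state image at SMBASE + 0xfe00, bases CS at SMBASE with RIP forced to 0x8000, and strips CR0/CR4/EFER down to a paging-off, protection-off environment, so the guest's SMI handler starts executing at SMBASE + 0x8000. A small hedged sketch of that entry-point arithmetic:

/*
 * Where the first SMI-handler instruction is fetched after enter_smm():
 * CS.base == SMBASE and RIP == 0x8000, while the state-save area written
 * above lives higher up at SMBASE + 0xfe00.
 */
static u64 example_smm_entry_point(struct kvm_vcpu *vcpu)
{
	return vcpu->arch.smbase + 0x8000;
}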
390 static int rsm_load_seg_32(struct kvm_vcpu *vcpu, in rsm_load_seg_32() argument
400 kvm_set_segment(vcpu, &desc, n); in rsm_load_seg_32()
406 static int rsm_load_seg_64(struct kvm_vcpu *vcpu, in rsm_load_seg_64() argument
416 kvm_set_segment(vcpu, &desc, n); in rsm_load_seg_64()
421 static int rsm_enter_protected_mode(struct kvm_vcpu *vcpu, in rsm_enter_protected_mode() argument
434 bad = kvm_set_cr3(vcpu, cr3); in rsm_enter_protected_mode()
443 bad = kvm_set_cr4(vcpu, cr4 & ~X86_CR4_PCIDE); in rsm_enter_protected_mode()
447 bad = kvm_set_cr0(vcpu, cr0); in rsm_enter_protected_mode()
452 bad = kvm_set_cr4(vcpu, cr4); in rsm_enter_protected_mode()
456 bad = kvm_set_cr3(vcpu, cr3 | pcid); in rsm_enter_protected_mode()
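The back-and-forth over CR3 and CR4 above exists because non-zero PCID bits in CR3 are only legal while CR4.PCIDE is set, and PCIDE in turn can only be enabled once paging and long mode are back up. A hedged reconstruction of that ordering (error handling collapsed to 0/1; the real rsm_enter_protected_mode() returns X86EMUL_* codes):

static int example_rsm_enter_protected_mode(struct kvm_vcpu *vcpu,
					    u64 cr0, u64 cr3, u64 cr4)
{
	u64 pcid = 0;

	/* Strip the PCID for the first CR3 load; reinstate it last. */
	if (cr4 & X86_CR4_PCIDE) {
		pcid = cr3 & 0xfff;
		cr3 &= ~0xfff;
	}

	if (kvm_set_cr3(vcpu, cr3))
		return 1;

	/* CR4 (minus PCIDE) before CR0, so PAE is in place for CR0.PG. */
	if (kvm_set_cr4(vcpu, cr4 & ~X86_CR4_PCIDE))
		return 1;

	if (kvm_set_cr0(vcpu, cr0))
		return 1;

	/* Only now can PCIDE, and then the saved PCID, be restored. */
	if (cr4 & X86_CR4_PCIDE) {
		if (kvm_set_cr4(vcpu, cr4))
			return 1;
		if (pcid && kvm_set_cr3(vcpu, cr3 | pcid))
			return 1;
	}

	return 0;
}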
469 struct kvm_vcpu *vcpu = ctxt->vcpu; in rsm_load_state_32() local
479 if (kvm_set_dr(vcpu, 6, smstate->dr6)) in rsm_load_state_32()
481 if (kvm_set_dr(vcpu, 7, smstate->dr7)) in rsm_load_state_32()
484 rsm_load_seg_32(vcpu, &smstate->tr, smstate->tr_sel, VCPU_SREG_TR); in rsm_load_state_32()
485 rsm_load_seg_32(vcpu, &smstate->ldtr, smstate->ldtr_sel, VCPU_SREG_LDTR); in rsm_load_state_32()
489 static_call(kvm_x86_set_gdt)(vcpu, &dt); in rsm_load_state_32()
493 static_call(kvm_x86_set_idt)(vcpu, &dt); in rsm_load_state_32()
495 rsm_load_seg_32(vcpu, &smstate->es, smstate->es_sel, VCPU_SREG_ES); in rsm_load_state_32()
496 rsm_load_seg_32(vcpu, &smstate->cs, smstate->cs_sel, VCPU_SREG_CS); in rsm_load_state_32()
497 rsm_load_seg_32(vcpu, &smstate->ss, smstate->ss_sel, VCPU_SREG_SS); in rsm_load_state_32()
499 rsm_load_seg_32(vcpu, &smstate->ds, smstate->ds_sel, VCPU_SREG_DS); in rsm_load_state_32()
500 rsm_load_seg_32(vcpu, &smstate->fs, smstate->fs_sel, VCPU_SREG_FS); in rsm_load_state_32()
501 rsm_load_seg_32(vcpu, &smstate->gs, smstate->gs_sel, VCPU_SREG_GS); in rsm_load_state_32()
503 vcpu->arch.smbase = smstate->smbase; in rsm_load_state_32()
505 r = rsm_enter_protected_mode(vcpu, smstate->cr0, in rsm_load_state_32()
511 static_call(kvm_x86_set_interrupt_shadow)(vcpu, 0); in rsm_load_state_32()
521 struct kvm_vcpu *vcpu = ctxt->vcpu; in rsm_load_state_64() local
531 if (kvm_set_dr(vcpu, 6, smstate->dr6)) in rsm_load_state_64()
533 if (kvm_set_dr(vcpu, 7, smstate->dr7)) in rsm_load_state_64()
536 vcpu->arch.smbase = smstate->smbase; in rsm_load_state_64()
538 if (kvm_set_msr(vcpu, MSR_EFER, smstate->efer & ~EFER_LMA)) in rsm_load_state_64()
541 rsm_load_seg_64(vcpu, &smstate->tr, VCPU_SREG_TR); in rsm_load_state_64()
545 static_call(kvm_x86_set_idt)(vcpu, &dt); in rsm_load_state_64()
547 rsm_load_seg_64(vcpu, &smstate->ldtr, VCPU_SREG_LDTR); in rsm_load_state_64()
551 static_call(kvm_x86_set_gdt)(vcpu, &dt); in rsm_load_state_64()
553 r = rsm_enter_protected_mode(vcpu, smstate->cr0, smstate->cr3, smstate->cr4); in rsm_load_state_64()
557 rsm_load_seg_64(vcpu, &smstate->es, VCPU_SREG_ES); in rsm_load_state_64()
558 rsm_load_seg_64(vcpu, &smstate->cs, VCPU_SREG_CS); in rsm_load_state_64()
559 rsm_load_seg_64(vcpu, &smstate->ss, VCPU_SREG_SS); in rsm_load_state_64()
560 rsm_load_seg_64(vcpu, &smstate->ds, VCPU_SREG_DS); in rsm_load_state_64()
561 rsm_load_seg_64(vcpu, &smstate->fs, VCPU_SREG_FS); in rsm_load_state_64()
562 rsm_load_seg_64(vcpu, &smstate->gs, VCPU_SREG_GS); in rsm_load_state_64()
564 static_call(kvm_x86_set_interrupt_shadow)(vcpu, 0); in rsm_load_state_64()
573 struct kvm_vcpu *vcpu = ctxt->vcpu; in emulator_leave_smm() local
579 smbase = vcpu->arch.smbase; in emulator_leave_smm()
581 ret = kvm_vcpu_read_guest(vcpu, smbase + 0xfe00, smram.bytes, sizeof(smram)); in emulator_leave_smm()
585 if ((vcpu->arch.hflags & HF_SMM_INSIDE_NMI_MASK) == 0) in emulator_leave_smm()
586 static_call(kvm_x86_set_nmi_mask)(vcpu, false); in emulator_leave_smm()
588 kvm_smm_changed(vcpu, false); in emulator_leave_smm()
592 * CR0/CR3/CR4/EFER. It's all a bit more complicated if the vCPU in emulator_leave_smm()
596 if (guest_cpuid_has(vcpu, X86_FEATURE_LM)) { in emulator_leave_smm()
601 cr4 = kvm_read_cr4(vcpu); in emulator_leave_smm()
603 kvm_set_cr4(vcpu, cr4 & ~X86_CR4_PCIDE); in emulator_leave_smm()
609 kvm_set_segment(vcpu, &cs_desc, VCPU_SREG_CS); in emulator_leave_smm()
614 cr0 = kvm_read_cr0(vcpu); in emulator_leave_smm()
616 kvm_set_cr0(vcpu, cr0 & ~(X86_CR0_PG | X86_CR0_PE)); in emulator_leave_smm()
619 if (guest_cpuid_has(vcpu, X86_FEATURE_LM)) { in emulator_leave_smm()
623 cr4 = kvm_read_cr4(vcpu); in emulator_leave_smm()
625 kvm_set_cr4(vcpu, cr4 & ~X86_CR4_PAE); in emulator_leave_smm()
629 kvm_set_msr(vcpu, MSR_EFER, efer); in emulator_leave_smm()
634 * Give leave_smm() a chance to make ISA-specific changes to the vCPU in emulator_leave_smm()
638 if (static_call(kvm_x86_leave_smm)(vcpu, &smram)) in emulator_leave_smm()
642 if (guest_cpuid_has(vcpu, X86_FEATURE_LM)) in emulator_leave_smm()
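emulator_leave_smm() ends by picking which SMRAM layout to reload based on whether the vCPU model advertises long mode. A hedged sketch of that final dispatch, pulled out into a helper for clarity (simplified: the real code sits inline in emulator_leave_smm(), is guarded by CONFIG_X86_64, and propagates X86EMUL_* return codes):

static int example_leave_smm_tail(struct kvm_vcpu *vcpu,
				  struct x86_emulate_ctxt *ctxt,
				  union kvm_smram *smram)
{
	/* The 64-bit layout applies only if guest CPUID has long mode. */
	if (guest_cpuid_has(vcpu, X86_FEATURE_LM))
		return rsm_load_state_64(ctxt, &smram->smram64);

	return rsm_load_state_32(ctxt, &smram->smram32);
}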