Lines Matching +full:io +full:- +full:width

46 	return vmcb->control.exit_code == SVM_EXIT_VMMCALL;  in null_check()
51 vmcb->control.intercept &= ~(1ULL << INTERCEPT_VMRUN); in prepare_no_vmrun_int()
56 return vmcb->control.exit_code == SVM_EXIT_ERR; in check_no_vmrun_int()
66 return vmcb->control.exit_code == SVM_EXIT_VMRUN; in check_vmrun()
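A minimal sketch of the pattern behind the fragments above, assuming the kvm-unit-tests vmcb/report helpers used throughout this file. The architectural point worth calling out: VMRUN is the one intercept that must always be set, so clearing INTERCEPT_VMRUN makes the nested VMRUN fail with VMEXIT_INVALID (SVM_EXIT_ERR) instead of entering the guest.

    static void prepare_no_vmrun_int(struct svm_test *test)
    {
        /* Illegal: the VMRUN intercept bit is architecturally mandatory. */
        vmcb->control.intercept &= ~(1ULL << INTERCEPT_VMRUN);
    }

    static bool check_no_vmrun_int(struct svm_test *test)
    {
        /* The malformed VMCB makes VMRUN exit with VMEXIT_INVALID (-1). */
        return vmcb->control.exit_code == SVM_EXIT_ERR;
    }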
72 vmcb->control.intercept |= 1 << INTERCEPT_RSM; in prepare_rsm_intercept()
73 vmcb->control.intercept_exceptions |= (1ULL << UD_VECTOR); in prepare_rsm_intercept()
90 if (vmcb->control.exit_code != SVM_EXIT_RSM) { in finished_rsm_intercept()
92 vmcb->control.exit_code); in finished_rsm_intercept()
95 vmcb->control.intercept &= ~(1 << INTERCEPT_RSM); in finished_rsm_intercept()
100 if (vmcb->control.exit_code != SVM_EXIT_EXCP_BASE + UD_VECTOR) { in finished_rsm_intercept()
102 vmcb->control.exit_code); in finished_rsm_intercept()
105 vmcb->save.rip += 2; in finished_rsm_intercept()
118 vmcb->control.intercept_cr_read |= 1 << 3; in prepare_cr3_intercept()
123 asm volatile ("mov %%cr3, %0" : "=r"(test->scratch) : : "memory"); in test_cr3_intercept()
128 return vmcb->control.exit_code == SVM_EXIT_READ_CR3; in check_cr3_intercept()
133 return null_check(test) && test->scratch == read_cr3(); in check_cr3_nointercept()
141 while (!__sync_bool_compare_and_swap(&test->scratch, 1, 2)) in corrupt_cr3_intercept_bypass()
152 vmcb->control.intercept_cr_read |= 1 << 3; in prepare_cr3_intercept_bypass()
160 test->scratch = 1; in test_cr3_intercept_bypass()
161 while (test->scratch != 2) in test_cr3_intercept_bypass()
166 test->scratch = a; in test_cr3_intercept_bypass()
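The bypass test above uses a lock-free handshake through test->scratch: the guest publishes stage 1, a second CPU compare-and-swaps it to 2 and corrupts state behind the intercept's back, and the guest then stores what it observed. A hedged sketch of the two sides (the real corruptor patches the guest's instruction stream into a CR3 read; those details are elided here):

    /* Second CPU: wait for the guest to reach stage 1, then take over. */
    static void corrupt_cr3_intercept_bypass(void *_test)
    {
        struct svm_test *test = _test;

        while (!__sync_bool_compare_and_swap(&test->scratch, 1, 2))
            pause();
        /* ... patch the guest's code path into "mov %cr3, %rax" here,
         * to probe whether the CR3 read intercept still fires ... */
    }

    /* Guest: publish stage 1, wait for the second CPU, run the patched
     * instruction, and report the result through scratch. */
    static void test_cr3_intercept_bypass(struct svm_test *test)
    {
        ulong a = 0;

        test->scratch = 1;
        while (test->scratch != 2)
            barrier();
        /* patched instruction executes here (elided) */
        test->scratch = a;
    }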
172 vmcb->control.intercept_dr_read = 0xff; in prepare_dr_intercept()
173 vmcb->control.intercept_dr_write = 0xff; in prepare_dr_intercept()
185 asm volatile ("mov %%dr0, %0" : "=r"(test->scratch) : : "memory"); in test_dr_intercept()
188 asm volatile ("mov %%dr1, %0" : "=r"(test->scratch) : : "memory"); in test_dr_intercept()
191 asm volatile ("mov %%dr2, %0" : "=r"(test->scratch) : : "memory"); in test_dr_intercept()
194 asm volatile ("mov %%dr3, %0" : "=r"(test->scratch) : : "memory"); in test_dr_intercept()
197 asm volatile ("mov %%dr4, %0" : "=r"(test->scratch) : : "memory"); in test_dr_intercept()
200 asm volatile ("mov %%dr5, %0" : "=r"(test->scratch) : : "memory"); in test_dr_intercept()
203 asm volatile ("mov %%dr6, %0" : "=r"(test->scratch) : : "memory"); in test_dr_intercept()
206 asm volatile ("mov %%dr7, %0" : "=r"(test->scratch) : : "memory"); in test_dr_intercept()
210 if (test->scratch != i) { in test_dr_intercept()
221 asm volatile ("mov %0, %%dr0" : : "r"(test->scratch) : "memory"); in test_dr_intercept()
224 asm volatile ("mov %0, %%dr1" : : "r"(test->scratch) : "memory"); in test_dr_intercept()
227 asm volatile ("mov %0, %%dr2" : : "r"(test->scratch) : "memory"); in test_dr_intercept()
230 asm volatile ("mov %0, %%dr3" : : "r"(test->scratch) : "memory"); in test_dr_intercept()
233 asm volatile ("mov %0, %%dr4" : : "r"(test->scratch) : "memory"); in test_dr_intercept()
236 asm volatile ("mov %0, %%dr5" : : "r"(test->scratch) : "memory"); in test_dr_intercept()
239 asm volatile ("mov %0, %%dr6" : : "r"(test->scratch) : "memory"); in test_dr_intercept()
242 asm volatile ("mov %0, %%dr7" : : "r"(test->scratch) : "memory"); in test_dr_intercept()
246 if (test->scratch != i) { in test_dr_intercept()
252 test->scratch = failcnt; in test_dr_intercept()
257 ulong n = (vmcb->control.exit_code - SVM_EXIT_READ_DR0); in dr_intercept_finished()
260 if (n > (SVM_EXIT_MAX_DR_INTERCEPT - SVM_EXIT_READ_DR0)) in dr_intercept_finished()
266 * Programmer's Manual Volume 2 - System Programming: in dr_intercept_finished()
270 test->scratch = (n % 16); in dr_intercept_finished()
273 vmcb->save.rip += 3; in dr_intercept_finished()
280 return !test->scratch; in check_dr_intercept()
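The host handler above folds all sixteen DR intercepts into one decode: SVM_EXIT_READ_DR0..7 and SVM_EXIT_WRITE_DR0..7 are contiguous blocks 16 apart, so the modulo recovers the register number for both directions, and mov to/from a debug register is always a 3-byte instruction. The guest side is unrolled into a switch because the DR number is part of the instruction encoding and cannot be a runtime variable. The decode in sketch form:

    ulong n = vmcb->control.exit_code - SVM_EXIT_READ_DR0;
    if (n <= SVM_EXIT_MAX_DR_INTERCEPT - SVM_EXIT_READ_DR0) {
        test->scratch = n % 16;     /* DR number, reads and writes alike */
        vmcb->save.rip += 3;        /* skip the 3-byte mov %drN instruction */
    }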
290 vmcb->control.intercept |= (1ULL << INTERCEPT_RDTSC); in prepare_next_rip()
306 return address == vmcb->control.next_rip; in check_next_rip()
314 vmcb->control.intercept |= (1ULL << INTERCEPT_MSR_PROT); in prepare_msr_intercept()
315 vmcb->control.intercept_exceptions |= (1ULL << GP_VECTOR); in prepare_msr_intercept()
328 	 * Programmer's Manual Volume 2 - System Programming: in test_msr_intercept()

330 * SEV_STATUS MSR (C001_0131) is a non-interceptable MSR. in test_msr_intercept()
341 test->scratch = -1; in test_msr_intercept()
346 if (test->scratch != msr_index) in test_msr_intercept()
358 if (test->scratch != msr_value) in test_msr_intercept()
362 test->scratch = -2; in test_msr_intercept()
367 u32 exit_code = vmcb->control.exit_code; in msr_intercept_finished()
372 exit_info_1 = vmcb->control.exit_info_1; in msr_intercept_finished()
382 opcode = (u8 *)vmcb->save.rip; in msr_intercept_finished()
406 vmcb->save.rip += 2; in msr_intercept_finished()
410 * For RDMSR, test->scratch is set to the MSR index; in msr_intercept_finished()
412 * For WRMSR, test->scratch is set to the MSR value; in msr_intercept_finished()
417 test->scratch = in msr_intercept_finished()
418 ((get_regs().rdx << 32) | (vmcb->save.rax & 0xffffffff)); in msr_intercept_finished()
420 test->scratch = get_regs().rcx; in msr_intercept_finished()
428 return (test->scratch == -2); in check_msr_intercept()
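The decode in msr_intercept_finished() is worth spelling out: a single SVM_EXIT_MSR exit covers both instructions, with exit_info_1 distinguishing them (0 = RDMSR, 1 = WRMSR), and both encodings are two bytes, hence rip += 2. A condensed sketch of the handler body implied by the fragments above:

    if (vmcb->control.exit_code == SVM_EXIT_MSR) {
        if (vmcb->control.exit_info_1) {
            /* WRMSR: record the 64-bit value from EDX:EAX.  On SVM,
             * RAX lives in the VMCB save area; RDX comes from the
             * host-saved register file. */
            test->scratch = (get_regs().rdx << 32) |
                            (vmcb->save.rax & 0xffffffff);
        } else {
            /* RDMSR: record the MSR index from ECX. */
            test->scratch = get_regs().rcx;
        }
        vmcb->save.rip += 2;    /* both RDMSR and WRMSR are 2 bytes */
    }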
433 vmcb->control.intercept_exceptions |= (1ULL << GP_VECTOR) in prepare_mode_switch()
437 test->scratch = 0; in prepare_mode_switch()
443 " ljmp *1f\n" /* jump to 32-bit code segment */ in test_mode_switch()
461 	" ljmpl %[cs16], $3f\n"	/* jump to 16-bit protected mode */ in test_mode_switch()
467 " ljmpl $0, $4f\n" /* jump to real-mode */ in test_mode_switch()
499 cr0 = vmcb->save.cr0; in mode_switch_finished()
500 cr4 = vmcb->save.cr4; in mode_switch_finished()
501 efer = vmcb->save.efer; in mode_switch_finished()
504 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) in mode_switch_finished()
508 vmcb->save.rip += 3; in mode_switch_finished()
511 switch (test->scratch) { in mode_switch_finished()
513 /* Test should be in real mode now - check for this */ in mode_switch_finished()
520 /* Test should be back in long-mode now - check for this */ in mode_switch_finished()
529 test->scratch += 1; in mode_switch_finished()
531 return test->scratch == 2; in mode_switch_finished()
536 return test->scratch == 2; in check_mode_switch()
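The host side above checkpoints the guest at each VMMCALL via test->scratch. A hedged sketch of what the two checkpoints verify, written with the architectural mode bits (real mode: CR0.PG/PE, CR4.PAE and EFER.LMA/LME all clear; long mode: all of them set):

    switch (test->scratch) {
    case 0:
        /* Guest should be in real mode now. */
        if ((cr0 & (X86_CR0_PG | X86_CR0_PE)) ||
            (cr4 & X86_CR4_PAE) ||
            (efer & (EFER_LMA | EFER_LME)))
            return true;        /* wrong mode: finish (and fail) early */
        break;
    case 2:
        /* Guest should be back in long mode now. */
        if (((cr0 & (X86_CR0_PG | X86_CR0_PE)) != (X86_CR0_PG | X86_CR0_PE)) ||
            !(cr4 & X86_CR4_PAE) ||
            ((efer & (EFER_LMA | EFER_LME)) != (EFER_LMA | EFER_LME)))
            return true;
        break;
    }
    test->scratch += 1;
    return test->scratch == 2;  /* done once both checkpoints have passed */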
543 vmcb->control.intercept |= (1ULL << INTERCEPT_IOIO_PROT); in prepare_ioio()
544 test->scratch = 0; in prepare_ioio()
551 // stage 0, test IO pass in test_ioio()
557 // test IO width, in/out in test_ioio()
572 // test low/high IO port in test_ioio()
617 test->scratch = -1; in test_ioio()
625 if (vmcb->control.exit_code == SVM_EXIT_VMMCALL) in ioio_finished()
628 if (vmcb->control.exit_code != SVM_EXIT_IOIO) in ioio_finished()
632 test->scratch += 1; in ioio_finished()
634 port = vmcb->control.exit_info_1 >> 16; in ioio_finished()
635 size = (vmcb->control.exit_info_1 >> SVM_IOIO_SIZE_SHIFT) & 7; in ioio_finished()
637 while (size--) { in ioio_finished()
648 return test->scratch != -1; in check_ioio()
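ioio_finished() above unpacks exit_info_1, whose IOIO layout puts the port number in bits 31:16 and a one-hot size field (1-, 2- or 4-byte) at SVM_IOIO_SIZE_SHIFT; per the APM, exit_info_2 holds the rIP of the instruction following the IN/OUT, so skipping the instruction needs no decoding. A short sketch, assuming the io_bitmap buffer the test owns:

    u16 port = vmcb->control.exit_info_1 >> 16;                 /* bits 31:16 */
    int size = (vmcb->control.exit_info_1 >> SVM_IOIO_SIZE_SHIFT) & 7;

    /* Toggle the intercept bit for every byte the access touches, so the
     * next stage exercises the opposite polarity for this port: */
    while (size--) {
        io_bitmap[port / 8] ^= 1 << (port & 7);
        port++;
    }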
653 vmcb->control.asid = 0; in prepare_asid_zero()
663 return vmcb->control.exit_code == SVM_EXIT_ERR; in check_asid_zero()
668 vmcb->control.intercept |= (1ULL << INTERCEPT_SELECTIVE_CR0); in sel_cr0_bug_prepare()
687 * are not in guest-mode anymore so we can't trigger an intercept. in sel_cr0_bug_test()
688 	 * Trigger a triple fault for now. in sel_cr0_bug_test()
690 	report_fail("sel_cr0 test. Cannot recover from this - exiting"); in sel_cr0_bug_test()
696 return vmcb->control.exit_code == SVM_EXIT_CR0_SEL_WRITE; in sel_cr0_bug_check()
711 vmcb->control.tsc_offset = TSC_OFFSET_VALUE; in tsc_adjust_prepare()
713 wrmsr(MSR_IA32_TSC_ADJUST, -TSC_ADJUST_VALUE); in tsc_adjust_prepare()
715 ok = adjust == -TSC_ADJUST_VALUE; in tsc_adjust_prepare()
721 ok &= adjust == -TSC_ADJUST_VALUE; in tsc_adjust_test()
723 uint64_t l1_tsc = rdtsc() - TSC_OFFSET_VALUE; in tsc_adjust_test()
724 wrmsr(MSR_IA32_TSC, l1_tsc - TSC_ADJUST_VALUE); in tsc_adjust_test()
727 ok &= adjust <= -2 * TSC_ADJUST_VALUE; in tsc_adjust_test()
729 uint64_t l1_tsc_end = rdtsc() - TSC_OFFSET_VALUE; in tsc_adjust_test()
730 ok &= (l1_tsc_end + TSC_ADJUST_VALUE - l1_tsc) < TSC_ADJUST_VALUE; in tsc_adjust_test()
732 uint64_t l1_tsc_msr = rdmsr(MSR_IA32_TSC) - TSC_OFFSET_VALUE; in tsc_adjust_test()
733 ok &= (l1_tsc_msr + TSC_ADJUST_VALUE - l1_tsc) < TSC_ADJUST_VALUE; in tsc_adjust_test()
741 return ok && adjust <= -2 * TSC_ADJUST_VALUE; in tsc_adjust_check()
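The arithmetic in the TSC_ADJUST fragments above is compact enough to deserve a worked form. A hedged sketch of the L2 guest's reasoning, assuming X86_FEATURE_TSC_ADJUST:

    /* The VMCB's tsc_offset is added to every guest RDTSC, so
     * subtracting it recovers L1's timeline: */
    uint64_t l1_tsc = rdtsc() - TSC_OFFSET_VALUE;

    /* Architecturally, writing IA32_TSC by a delta D also moves
     * IA32_TSC_ADJUST by D.  Having been seeded with -TSC_ADJUST_VALUE,
     * stepping the TSC back by at least TSC_ADJUST_VALUE must leave the
     * MSR at or below -2 * TSC_ADJUST_VALUE: */
    wrmsr(MSR_IA32_TSC, l1_tsc - TSC_ADJUST_VALUE);
    int64_t adjust = rdmsr(MSR_IA32_TSC_ADJUST);
    ok &= adjust <= -2 * TSC_ADJUST_VALUE;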
754 while (rdtsc() - start_tsc < guest_tsc_delay_value) in svm_tsc_scale_guest()
766 vmcb->control.tsc_offset = tsc_offset; in svm_tsc_scale_run_testcase()
772 report_fail("unexpected vm exit code 0x%x", vmcb->control.exit_code); in svm_tsc_scale_run_testcase()
774 actual_duration = (rdtsc() - start_tsc) >> TSC_SHIFT; in svm_tsc_scale_run_testcase()
812 latvmrun_min = latvmexit_min = -1ULL; in latency_prepare()
825 cycles = tsc_end - tsc_start; in latency_test()
847 cycles = tsc_end - tsc_start; in latency_finished()
857 vmcb->save.rip += 3; in latency_finished()
859 runs -= 1; in latency_finished()
868 vmcb->control.clean = VMCB_CLEAN_ALL; in latency_finished_clean()
885 latvmload_min = latvmsave_min = latstgi_min = latclgi_min = -1ULL; in lat_svm_insn_prepare()
895 for ( ; runs != 0; runs--) { in lat_svm_insn_finished()
898 cycles = rdtsc() - tsc_start; in lat_svm_insn_finished()
907 cycles = rdtsc() - tsc_start; in lat_svm_insn_finished()
916 cycles = rdtsc() - tsc_start; in lat_svm_insn_finished()
925 cycles = rdtsc() - tsc_start; in lat_svm_insn_finished()
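The latency fragments above use the usual min/max accumulation, with the minima seeded to -1ULL (i.e. UINT64_MAX) so the first sample always wins. Sketch of one accumulation step; the sum variable here is an assumed name:

    u64 cycles = tsc_end - tsc_start;
    if (cycles > latvmrun_max)
        latvmrun_max = cycles;
    if (cycles < latvmrun_min)
        latvmrun_min = cycles;
    vmrun_sum += cycles;    /* assumed accumulator for the average */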
952 * Report failures from SVM guest code, and on failure, set the stage to -1 and
953 * do VMMCALL to terminate the test (host side must treat -1 as "finished").
961 set_test_stage(test, -1); \
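The fragment above is the tail of the report_svm_guest() helper used by later tests (see pending_event_cli_finished() further down). A reconstruction consistent with the visible pieces; treat the exact body as approximate:

    #define report_svm_guest(cond, test, fmt, args...)      \
    do {                                                    \
        if (!(cond)) {                                      \
            report_fail(fmt, ##args);                       \
            set_test_stage(test, -1);                       \
            vmmcall();  /* host treats stage -1 as done */  \
        }                                                   \
    } while (0)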
987 vmcb->control.intercept |= (1ULL << INTERCEPT_INTR); in pending_event_prepare()
988 vmcb->control.int_ctl |= V_INTR_MASKING_MASK; in pending_event_prepare()
1005 if (vmcb->control.exit_code != SVM_EXIT_INTR) { in pending_event_finished()
1007 vmcb->control.exit_code); in pending_event_finished()
1011 vmcb->control.intercept &= ~(1ULL << INTERCEPT_INTR); in pending_event_finished()
1012 vmcb->control.int_ctl &= ~V_INTR_MASKING_MASK; in pending_event_finished()
1086 report_svm_guest(vmcb->control.exit_code == SVM_EXIT_VMMCALL, test, in pending_event_cli_finished()
1087 "Wanted VMMCALL VM-Exit, got exit reason 0x%x", in pending_event_cli_finished()
1088 vmcb->control.exit_code); in pending_event_cli_finished()
1092 vmcb->save.rip += 3; in pending_event_cli_finished()
1096 vmcb->control.int_ctl |= V_INTR_MASKING_MASK; in pending_event_cli_finished()
1187 report(rdtsc() - start > 10000, "IRQ arrived after expected delay"); in interrupt_test()
1199 report(rdtsc() - start > 10000, "IRQ arrived after expected delay"); in interrupt_test()
1209 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in interrupt_finished()
1211 vmcb->control.exit_code); in interrupt_finished()
1214 vmcb->save.rip += 3; in interrupt_finished()
1216 vmcb->control.intercept |= (1ULL << INTERCEPT_INTR); in interrupt_finished()
1217 vmcb->control.int_ctl |= V_INTR_MASKING_MASK; in interrupt_finished()
1222 if (vmcb->control.exit_code != SVM_EXIT_INTR) { in interrupt_finished()
1224 vmcb->control.exit_code); in interrupt_finished()
1230 vmcb->control.intercept &= ~(1ULL << INTERCEPT_INTR); in interrupt_finished()
1231 vmcb->control.int_ctl &= ~V_INTR_MASKING_MASK; in interrupt_finished()
1285 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in nmi_finished()
1287 vmcb->control.exit_code); in nmi_finished()
1290 vmcb->save.rip += 3; in nmi_finished()
1292 vmcb->control.intercept |= (1ULL << INTERCEPT_NMI); in nmi_finished()
1296 if (vmcb->control.exit_code != SVM_EXIT_NMI) { in nmi_finished()
1298 vmcb->control.exit_code); in nmi_finished()
1356 report(rdtsc() - start > NMI_DELAY, "direct NMI after expected delay"); in nmi_hlt_test()
1369 report(rdtsc() - start > NMI_DELAY, "intercepted NMI after expected delay"); in nmi_hlt_test()
1378 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in nmi_hlt_finished()
1380 vmcb->control.exit_code); in nmi_hlt_finished()
1383 vmcb->save.rip += 3; in nmi_hlt_finished()
1385 vmcb->control.intercept |= (1ULL << INTERCEPT_NMI); in nmi_hlt_finished()
1389 if (vmcb->control.exit_code != SVM_EXIT_NMI) { in nmi_hlt_finished()
1391 vmcb->control.exit_code); in nmi_hlt_finished()
1419 * intercepting "real" NMIs should result in an ERR VM-Exit. in vnmi_prepare()
1421 vmcb->control.intercept &= ~(1ULL << INTERCEPT_NMI); in vnmi_prepare()
1422 vmcb->control.int_ctl = V_NMI_ENABLE_MASK; in vnmi_prepare()
1423 vmcb->control.int_vector = NMI_VECTOR; in vnmi_prepare()
1439 if (vmcb->control.exit_code != SVM_EXIT_ERR) { in vnmi_finished()
1440 report_fail("Wanted ERR VM-Exit, got 0x%x", in vnmi_finished()
1441 vmcb->control.exit_code); in vnmi_finished()
1445 vmcb->control.intercept |= (1ULL << INTERCEPT_NMI); in vnmi_finished()
1446 vmcb->save.rip += 3; in vnmi_finished()
1450 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in vnmi_finished()
1451 report_fail("Wanted VMMCALL VM-Exit, got 0x%x", in vnmi_finished()
1452 vmcb->control.exit_code); in vnmi_finished()
1456 vmcb->control.int_ctl |= V_NMI_PENDING_MASK; in vnmi_finished()
1457 vmcb->save.rip += 3; in vnmi_finished()
1461 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in vnmi_finished()
1462 report_fail("Wanted VMMCALL VM-Exit, got 0x%x", in vnmi_finished()
1463 vmcb->control.exit_code); in vnmi_finished()
1466 if (vmcb->control.int_ctl & V_NMI_BLOCKING_MASK) { in vnmi_finished()
1471 vmcb->save.rip += 3; in vnmi_finished()
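Three vNMI rules drive the staging above: with V_NMI_ENABLE_MASK set, running without the real-NMI intercept is invalid (hence the expected SVM_EXIT_ERR in the first stage); a virtual NMI is queued by setting V_NMI_PENDING_MASK in int_ctl; and while the guest is inside its NMI handler, hardware holds V_NMI_BLOCKING_MASK until the IRET. The injection step in sketch form, assuming vNMI hardware support:

    /* Queue one virtual NMI for the next VMRUN: */
    vmcb->control.int_ctl |= V_NMI_PENDING_MASK;
    vmcb->save.rip += 3;            /* skip the guest's VMMCALL */

    /* ... once the guest has handled it and executed IRET, the blocking
     * bit must be clear again, which is what the final stage checks: */
    report(!(vmcb->control.int_ctl & V_NMI_BLOCKING_MASK),
           "V_NMI_BLOCKING clear after the guest's IRET");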
1512 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in exc_inject_finished()
1514 vmcb->control.exit_code); in exc_inject_finished()
1517 vmcb->save.rip += 3; in exc_inject_finished()
1518 vmcb->control.event_inj = NMI_VECTOR | SVM_EVTINJ_TYPE_EXEPT | SVM_EVTINJ_VALID; in exc_inject_finished()
1522 if (vmcb->control.exit_code != SVM_EXIT_ERR) { in exc_inject_finished()
1524 vmcb->control.exit_code); in exc_inject_finished()
1528 vmcb->control.event_inj = DE_VECTOR | SVM_EVTINJ_TYPE_EXEPT | SVM_EVTINJ_VALID; in exc_inject_finished()
1532 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in exc_inject_finished()
1534 vmcb->control.exit_code); in exc_inject_finished()
1537 vmcb->save.rip += 3; in exc_inject_finished()
1539 report(!(vmcb->control.event_inj & SVM_EVTINJ_VALID), "eventinj.VALID cleared"); in exc_inject_finished()
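Two EVENTINJ behaviors carry the test above: vector 2 (NMI) combined with SVM_EVTINJ_TYPE_EXEPT is an illegal encoding, so VMRUN fails with SVM_EXIT_ERR, while a legal injection (#DE here) is delivered on entry and hardware clears the VALID bit once the event is taken. In sketch form:

    /* Illegal: the NMI vector with the exception type fails VMRUN. */
    vmcb->control.event_inj = NMI_VECTOR | SVM_EVTINJ_TYPE_EXEPT |
                              SVM_EVTINJ_VALID;

    /* Legal: inject a #DE; after delivery, VALID reads back as 0. */
    vmcb->control.event_inj = DE_VECTOR | SVM_EVTINJ_TYPE_EXEPT |
                              SVM_EVTINJ_VALID;
    svm_vmrun();
    report(!(vmcb->control.event_inj & SVM_EVTINJ_VALID),
           "eventinj.VALID cleared");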
1562 virq_rip = regs->rip; in virq_isr()
1569 vmcb->control.int_ctl = V_INTR_MASKING_MASK | V_IRQ_MASK | in virq_inject_prepare()
1571 vmcb->control.int_vector = 0xf1; in virq_inject_prepare()
1573 virq_rip = -1; in virq_inject_prepare()
1606 vmcb->save.rip += 3; in virq_inject_finished()
1610 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in virq_inject_finished()
1612 vmcb->control.exit_code); in virq_inject_finished()
1615 if (vmcb->control.int_ctl & V_IRQ_MASK) { in virq_inject_finished()
1620 vmcb->control.intercept |= (1ULL << INTERCEPT_VINTR); in virq_inject_finished()
1621 vmcb->control.int_ctl = V_INTR_MASKING_MASK | V_IRQ_MASK | in virq_inject_finished()
1626 if (vmcb->control.exit_code != SVM_EXIT_VINTR) { in virq_inject_finished()
1628 vmcb->control.exit_code); in virq_inject_finished()
1635 vmcb->control.intercept &= ~(1ULL << INTERCEPT_VINTR); in virq_inject_finished()
1639 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in virq_inject_finished()
1641 vmcb->control.exit_code); in virq_inject_finished()
1646 vmcb->control.int_ctl = V_INTR_MASKING_MASK | V_IRQ_MASK | in virq_inject_finished()
1649 vmcb->control.int_ctl |= 0x0a & V_TPR_MASK; in virq_inject_finished()
1653 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in virq_inject_finished()
1655 vmcb->control.exit_code); in virq_inject_finished()
1658 vmcb->control.intercept |= (1ULL << INTERCEPT_VINTR); in virq_inject_finished()
1663 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in virq_inject_finished()
1665 vmcb->control.exit_code); in virq_inject_finished()
1687 vmcb->control.int_state = SVM_INTERRUPT_SHADOW_MASK; in virq_inject_within_shadow_prepare()
1688 vmcb->save.rflags |= X86_EFLAGS_IF; in virq_inject_within_shadow_prepare()
1696 vmcb->save.rip = (unsigned long) test->guest_func; in virq_inject_within_shadow_prepare_gif_clear()
1701 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) in virq_inject_within_shadow_finished()
1703 vmcb->control.exit_code); in virq_inject_within_shadow_finished()
1708 else if (vmcb->control.int_ctl & V_IRQ_MASK) in virq_inject_within_shadow_finished()
1710 else if (vmcb->control.int_state & SVM_INTERRUPT_SHADOW_MASK) in virq_inject_within_shadow_finished()
1727 * In the assembly loop below 'ins' is executed while IO instructions
1735 * a nested VMexit. Due to a bug, it would leave pre-emulation values of RIP,
1758 vmcb->control.int_ctl = V_INTR_MASKING_MASK; in reg_corruption_prepare()
1759 vmcb->control.intercept |= (1ULL << INTERCEPT_INTR); in reg_corruption_prepare()
1773 "movw $0x4d0, %%dx\n\t" // IO port in reg_corruption_test()
1795 if (vmcb->control.exit_code == SVM_EXIT_INTR) { in reg_corruption_finished()
1797 void* guest_rip = (void*)vmcb->save.rip; in reg_corruption_finished()
1843 tss_entry->type &= ~DESC_BUSY; in init_startup_prepare()
1866 vmcb->control.intercept |= (1ULL << INTERCEPT_INIT); in init_intercept_prepare()
1876 vmcb->save.rip += 3; in init_intercept_finished()
1878 if (vmcb->control.exit_code != SVM_EXIT_INIT) { in init_intercept_finished()
1880 vmcb->control.exit_code); in init_intercept_finished()
1922 r->rflags &= ~X86_EFLAGS_TF; in host_rflags_db_handler()
1923 rip_detected = r->rip; in host_rflags_db_handler()
1925 r->rflags |= X86_EFLAGS_RF; in host_rflags_db_handler()
1929 if (r->rip == (u64)&vmrun_rip) { in host_rflags_db_handler()
1934 rip_detected = r->rip; in host_rflags_db_handler()
1935 r->rflags &= ~X86_EFLAGS_TF; in host_rflags_db_handler()
1944 r->rflags &= ~X86_EFLAGS_TF; in host_rflags_db_handler()
1980 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in host_rflags_finished()
1982 vmcb->control.exit_code); in host_rflags_finished()
1985 vmcb->save.rip += 3; in host_rflags_finished()
1993 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL || in host_rflags_finished()
1997 vmcb->control.exit_code); in host_rflags_finished()
2000 vmcb->save.rip += 3; in host_rflags_finished()
2009 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL || in host_rflags_finished()
2013 "%lx", vmcb->control.exit_code, in host_rflags_finished()
2020 vmcb->save.rip += 3; in host_rflags_finished()
2023 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL || in host_rflags_finished()
2031 "%lx", vmcb->control.exit_code, in host_rflags_finished()
2037 vmcb->save.rip += 3; in host_rflags_finished()
2059 * after VM-exit from an L2 guest that sets CR4.OSXSAVE to a different
2079 vmcb->save.cr4 = cr4; in svm_cr4_osxsave_test()
2127 vmcb->save.cr0 = tmp; \
2130 vmcb->save.cr3 = tmp; \
2133 vmcb->save.cr4 = tmp; \
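The three assignments above are the per-CR cases inside the SVM_TEST_CR_RESERVED_BITS macro; together with SVM_TEST_REG_RESERVED_BITS (used for EFER and DR6/DR7 below) they share one pattern: set one reserved/MBZ bit at a time and expect VMRUN to fail. A hedged sketch of the loop; the real macros' argument order and stride handling may differ:

    for (u8 bit = start; bit <= end; bit += inc) {
        vmcb->save.cr4 = saved_val | (1ull << bit); /* one reserved bit set */
        report(svm_vmrun() == SVM_EXIT_ERR,
               "VMRUN fails with CR4 reserved bit %d set", bit);
    }
    vmcb->save.cr4 = saved_val;     /* restore the known-good value */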
2144 * Un-setting EFER.SVME is illegal in test_efer()
2146 u64 efer_saved = vmcb->save.efer; in test_efer()
2151 vmcb->save.efer = efer; in test_efer()
2153 vmcb->save.efer = efer_saved; in test_efer()
2158 efer_saved = vmcb->save.efer; in test_efer()
2160 SVM_TEST_REG_RESERVED_BITS(8, 9, 1, "EFER", vmcb->save.efer, in test_efer()
2162 SVM_TEST_REG_RESERVED_BITS(16, 63, 4, "EFER", vmcb->save.efer, in test_efer()
2168 u64 cr0_saved = vmcb->save.cr0; in test_efer()
2170 u64 cr4_saved = vmcb->save.cr4; in test_efer()
2174 vmcb->save.efer = efer; in test_efer()
2176 vmcb->save.cr0 = cr0; in test_efer()
2178 vmcb->save.cr4 = cr4; in test_efer()
2189 vmcb->save.cr4 = cr4; in test_efer()
2191 vmcb->save.cr0 = cr0; in test_efer()
2196 * EFER.LME, CR0.PG, CR4.PAE, CS.L, and CS.D are all non-zero. in test_efer()
2198 u32 cs_attrib_saved = vmcb->save.cs.attrib; in test_efer()
2202 vmcb->save.cr0 = cr0; in test_efer()
2205 vmcb->save.cs.attrib = cs_attrib; in test_efer()
2210 vmcb->save.cr0 = cr0_saved; in test_efer()
2211 vmcb->save.cr4 = cr4_saved; in test_efer()
2212 vmcb->save.efer = efer_saved; in test_efer()
2213 vmcb->save.cs.attrib = cs_attrib_saved; in test_efer()
2219 	 * Un-setting CR0.CD and setting CR0.NW is an illegal combination in test_cr0()
2221 u64 cr0_saved = vmcb->save.cr0; in test_cr0()
2226 vmcb->save.cr0 = cr0; in test_cr0()
2230 vmcb->save.cr0 = cr0; in test_cr0()
2235 vmcb->save.cr0 = cr0; in test_cr0()
2239 vmcb->save.cr0 = cr0; in test_cr0()
2242 vmcb->save.cr0 = cr0_saved; in test_cr0()
2249 SVM_TEST_REG_RESERVED_BITS(32, 63, 4, "CR0", vmcb->save.cr0, cr0_saved, in test_cr0()
2251 vmcb->save.cr0 = cr0_saved; in test_cr0()
2258 * [63:52] - long mode in test_cr3()
2260 u64 cr3_saved = vmcb->save.cr3; in test_cr3()
2265 vmcb->save.cr3 = cr3_saved & ~SVM_CR3_LONG_MBZ_MASK; in test_cr3()
2267 vmcb->save.cr3); in test_cr3()
2270 * CR3 non-MBZ reserved bits based on different modes: in test_cr3()
2271 * [11:5] [2:0] - long mode (PCIDE=0) in test_cr3()
2272 * [2:0] - PAE legacy mode in test_cr3()
2274 u64 cr4_saved = vmcb->save.cr4; in test_cr3()
2281 vmcb->save.cr4 = cr4_saved | X86_CR4_PCIDE; in test_cr3()
2285 vmcb->save.cr3 = cr3_saved & ~SVM_CR3_LONG_RESERVED_MASK; in test_cr3()
2287 vmcb->save.cr3); in test_cr3()
2290 vmcb->save.cr4 = cr4_saved & ~X86_CR4_PCIDE; in test_cr3()
2302 vmcb->save.cr3 = cr3_saved; in test_cr3()
2308 vmcb->save.cr4 = cr4_saved | X86_CR4_PAE; in test_cr3()
2315 vmcb->save.cr3 = cr3_saved; in test_cr3()
2316 vmcb->save.cr4 = cr4_saved; in test_cr3()
2322 u64 cr4_saved = vmcb->save.cr4; in test_cr4()
2323 u64 efer_saved = vmcb->save.efer; in test_cr4()
2327 vmcb->save.efer = efer; in test_cr4()
2332 vmcb->save.efer = efer; in test_cr4()
2338 vmcb->save.cr4 = cr4_saved; in test_cr4()
2339 vmcb->save.efer = efer_saved; in test_cr4()
2347 u64 dr_saved = vmcb->save.dr6; in test_dr()
2349 SVM_TEST_REG_RESERVED_BITS(32, 63, 4, "DR6", vmcb->save.dr6, dr_saved, in test_dr()
2351 vmcb->save.dr6 = dr_saved; in test_dr()
2353 dr_saved = vmcb->save.dr7; in test_dr()
2354 SVM_TEST_REG_RESERVED_BITS(32, 63, 4, "DR7", vmcb->save.dr7, dr_saved, in test_dr()
2357 vmcb->save.dr7 = dr_saved; in test_dr()
2360 /* TODO: verify if high 32-bits are sign- or zero-extended on bare metal */
2363 vmcb->control.intercept = saved_intercept | 1ULL << type; \
2365 vmcb->control.msrpm_base_pa = addr; \
2367 vmcb->control.iopm_base_pa = addr; \
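Those three control-field writes form the body of the TEST_BITMAP_ADDR macro exercised below. A reconstruction consistent with the visible fragments (the report message is approximate):

    #define TEST_BITMAP_ADDR(saved_intercept, type, addr, exit_code, msg)  \
    {                                                                      \
        vmcb->control.intercept = saved_intercept | 1ULL << type;         \
        if (type == INTERCEPT_MSR_PROT)                                    \
            vmcb->control.msrpm_base_pa = addr;                            \
        else                                                               \
            vmcb->control.iopm_base_pa = addr;                             \
        report(svm_vmrun() == exit_code,                                   \
               "Test %s address: %lx", msg, addr);                         \
    }

The probes then walk base addresses up to the CPU's physical-address limit (the MSRPM is 8 KiB, the IOPM 12 KiB, so a base too close to the limit must fail VMRUN with SVM_EXIT_ERR) and verify that setting the low 12 bits of an otherwise valid base is tolerated.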
2390 u64 saved_intercept = vmcb->control.intercept; in test_msrpm_iopm_bitmap_addrs()
2392 u64 addr = virt_to_phys(msr_bitmap) & (~((1ull << 12) - 1)); in test_msrpm_iopm_bitmap_addrs()
2395 addr_beyond_limit - 2 * PAGE_SIZE, SVM_EXIT_ERR, in test_msrpm_iopm_bitmap_addrs()
2398 addr_beyond_limit - 2 * PAGE_SIZE + 1, SVM_EXIT_ERR, in test_msrpm_iopm_bitmap_addrs()
2401 addr_beyond_limit - PAGE_SIZE, SVM_EXIT_ERR, in test_msrpm_iopm_bitmap_addrs()
2405 addr |= (1ull << 12) - 1; in test_msrpm_iopm_bitmap_addrs()
2410 addr_beyond_limit - 4 * PAGE_SIZE, SVM_EXIT_VMMCALL, in test_msrpm_iopm_bitmap_addrs()
2413 addr_beyond_limit - 3 * PAGE_SIZE, SVM_EXIT_VMMCALL, in test_msrpm_iopm_bitmap_addrs()
2416 addr_beyond_limit - 2 * PAGE_SIZE - 2, SVM_EXIT_VMMCALL, in test_msrpm_iopm_bitmap_addrs()
2419 addr_beyond_limit - 2 * PAGE_SIZE, SVM_EXIT_ERR, in test_msrpm_iopm_bitmap_addrs()
2422 addr_beyond_limit - PAGE_SIZE, SVM_EXIT_ERR, in test_msrpm_iopm_bitmap_addrs()
2424 addr = virt_to_phys(io_bitmap) & (~((1ull << 11) - 1)); in test_msrpm_iopm_bitmap_addrs()
2427 addr |= (1ull << 12) - 1; in test_msrpm_iopm_bitmap_addrs()
2431 vmcb->control.intercept = saved_intercept; in test_msrpm_iopm_bitmap_addrs()
2440 seg_base = (seg_base & ((1ul << addr_limit) - 1)) | noncanonical_mask; \
2449 seg_base = (seg_base & ((1ul << addr_limit) - 1)) | noncanonical_mask; \
2464 u64 noncanonical_mask = NONCANONICAL & ~((1ul << addr_limit) - 1); in test_canonicalization()
2466 TEST_CANONICAL_VMLOAD(vmcb->save.fs.base, "FS"); in test_canonicalization()
2467 TEST_CANONICAL_VMLOAD(vmcb->save.gs.base, "GS"); in test_canonicalization()
2468 TEST_CANONICAL_VMLOAD(vmcb->save.ldtr.base, "LDTR"); in test_canonicalization()
2469 TEST_CANONICAL_VMLOAD(vmcb->save.tr.base, "TR"); in test_canonicalization()
2470 TEST_CANONICAL_VMLOAD(vmcb->save.kernel_gs_base, "KERNEL GS"); in test_canonicalization()
2471 TEST_CANONICAL_VMRUN(vmcb->save.es.base, "ES"); in test_canonicalization()
2472 TEST_CANONICAL_VMRUN(vmcb->save.cs.base, "CS"); in test_canonicalization()
2473 TEST_CANONICAL_VMRUN(vmcb->save.ss.base, "SS"); in test_canonicalization()
2474 TEST_CANONICAL_VMRUN(vmcb->save.ds.base, "DS"); in test_canonicalization()
2475 TEST_CANONICAL_VMRUN(vmcb->save.gdtr.base, "GDTR"); in test_canonicalization()
2476 TEST_CANONICAL_VMRUN(vmcb->save.idtr.base, "IDTR"); in test_canonicalization()
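The canonicalization checks above hinge on one mask computation. With addr_limit = 48 virtual-address bits (57 with LA57) and kvm-unit-tests' NONCANONICAL constant (0xaaaaaaaaaaaaaaaa at the time of writing), the arithmetic works out to:

    /* Keep only the bits above the canonical range: */
    u64 noncanonical_mask = NONCANONICAL & ~((1ul << addr_limit) - 1);
    /* addr_limit = 48  =>  mask = 0xaaaa000000000000 */

    /* Preserve the low bits of the base, poison the high ones: */
    seg_base = (seg_base & ((1ul << addr_limit) - 1)) | noncanonical_mask;

Each TEST_CANONICAL_* invocation then loads the poisoned base via VMLOAD or VMRUN and checks how the non-canonical value is handled.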
2490 guest_rflags_test_trap_rip = r->rip; in guest_rflags_test_db_handler()
2491 r->rflags &= ~X86_EFLAGS_TF; in guest_rflags_test_db_handler()
2530 vmcb->save.rflags |= X86_EFLAGS_TF; in svm_test_singlestep()
2538 vmcb->save.rip += 3; in svm_test_singlestep()
2539 vmcb->save.rflags |= X86_EFLAGS_TF; in svm_test_singlestep()
2540 report (__svm_vmrun(vmcb->save.rip) == SVM_EXIT_VMMCALL && in svm_test_singlestep()
2546 vmcb->save.rip += 3; in svm_test_singlestep()
2547 report (__svm_vmrun(vmcb->save.rip) == SVM_EXIT_VMMCALL && in svm_test_singlestep()
2548 vmcb->save.rip == (u64)&guest_end, "Test EFLAGS.TF on VMRUN: guest execution completion"); in svm_test_singlestep()
2571 r->rip += 3; in gp_isr()
2601 		report_fail("Got #GP exception - svm erratum reproduced at 0x%lx", in svm_vmrun_errata_test()
2627 u32 intercept_saved = vmcb->control.intercept; in svm_vmload_vmsave()
2635 vmcb->control.intercept &= ~(1ULL << INTERCEPT_VMLOAD); in svm_vmload_vmsave()
2636 vmcb->control.intercept &= ~(1ULL << INTERCEPT_VMSAVE); in svm_vmload_vmsave()
2638 report(vmcb->control.exit_code == SVM_EXIT_VMMCALL, "Test " in svm_vmload_vmsave()
2645 vmcb->control.intercept |= (1ULL << INTERCEPT_VMLOAD); in svm_vmload_vmsave()
2647 report(vmcb->control.exit_code == SVM_EXIT_VMLOAD, "Test " in svm_vmload_vmsave()
2649 vmcb->control.intercept &= ~(1ULL << INTERCEPT_VMLOAD); in svm_vmload_vmsave()
2650 vmcb->control.intercept |= (1ULL << INTERCEPT_VMSAVE); in svm_vmload_vmsave()
2652 report(vmcb->control.exit_code == SVM_EXIT_VMSAVE, "Test " in svm_vmload_vmsave()
2654 vmcb->control.intercept &= ~(1ULL << INTERCEPT_VMSAVE); in svm_vmload_vmsave()
2656 report(vmcb->control.exit_code == SVM_EXIT_VMMCALL, "Test " in svm_vmload_vmsave()
2659 vmcb->control.intercept |= (1ULL << INTERCEPT_VMLOAD); in svm_vmload_vmsave()
2661 report(vmcb->control.exit_code == SVM_EXIT_VMLOAD, "Test " in svm_vmload_vmsave()
2663 vmcb->control.intercept &= ~(1ULL << INTERCEPT_VMLOAD); in svm_vmload_vmsave()
2665 report(vmcb->control.exit_code == SVM_EXIT_VMMCALL, "Test " in svm_vmload_vmsave()
2668 vmcb->control.intercept |= (1ULL << INTERCEPT_VMSAVE); in svm_vmload_vmsave()
2670 report(vmcb->control.exit_code == SVM_EXIT_VMSAVE, "Test " in svm_vmload_vmsave()
2672 vmcb->control.intercept &= ~(1ULL << INTERCEPT_VMSAVE); in svm_vmload_vmsave()
2674 report(vmcb->control.exit_code == SVM_EXIT_VMMCALL, "Test " in svm_vmload_vmsave()
2677 vmcb->control.intercept = intercept_saved; in svm_vmload_vmsave()
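The long toggle sequence above is one pattern repeated for every combination of the two intercept bits: the guest executes VMLOAD, VMSAVE and finally VMMCALL, and the exit code must name the first intercepted instruction (or VMMCALL when neither is intercepted). One stage in sketch form:

    vmcb->control.intercept |= (1ULL << INTERCEPT_VMLOAD);
    svm_vmrun();
    report(vmcb->control.exit_code == SVM_EXIT_VMLOAD,
           "VMLOAD intercept triggers SVM_EXIT_VMLOAD");
    vmcb->control.intercept &= ~(1ULL << INTERCEPT_VMLOAD);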
2695 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in vgif_finished()
2699 vmcb->control.int_ctl |= V_GIF_ENABLED_MASK; in vgif_finished()
2700 vmcb->save.rip += 3; in vgif_finished()
2704 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in vgif_finished()
2708 if (!(vmcb->control.int_ctl & V_GIF_MASK)) { in vgif_finished()
2710 vmcb->control.int_ctl &= ~V_GIF_ENABLED_MASK; in vgif_finished()
2714 vmcb->save.rip += 3; in vgif_finished()
2718 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in vgif_finished()
2722 if (vmcb->control.int_ctl & V_GIF_MASK) { in vgif_finished()
2724 vmcb->control.int_ctl &= ~V_GIF_ENABLED_MASK; in vgif_finished()
2728 vmcb->save.rip += 3; in vgif_finished()
2730 vmcb->control.int_ctl &= ~V_GIF_ENABLED_MASK; in vgif_finished()
2773 vmcb->control.pause_filter_count = filter_value; in pause_filter_run_test()
2774 vmcb->control.pause_filter_thresh = threshold; in pause_filter_run_test()
2778 report(vmcb->control.exit_code == SVM_EXIT_PAUSE, "expected PAUSE vmexit"); in pause_filter_run_test()
2780 report(vmcb->control.exit_code == SVM_EXIT_VMMCALL, "no expected PAUSE vmexit"); in pause_filter_run_test()
2790 vmcb->control.intercept |= (1 << INTERCEPT_PAUSE); in pause_filter_test()
2792 	// filter count more than pause count - no VMexit in pause_filter_test()
2795 	// filter count smaller than pause count - no VMexit in pause_filter_test()
2800 	// filter count smaller than pause count - no VMexit + large enough threshold in pause_filter_test()
2804 	// filter count smaller than pause count - no VMexit + small threshold in pause_filter_test()
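For reference while reading the four cases above: hardware decrements pause_filter_count on every guest PAUSE and takes an SVM_EXIT_PAUSE exit when the count runs out; pause_filter_thresh, where supported, re-arms the count whenever two PAUSEs are further apart than the threshold, so only tight PAUSE loops accumulate. The knobs in sketch form (values illustrative):

    vmcb->control.intercept |= (1ULL << INTERCEPT_PAUSE);
    vmcb->control.pause_filter_count = 10;      /* exit after ~10 close PAUSEs */
    vmcb->control.pause_filter_thresh = 1000;   /* larger gaps re-arm the count */
    svm_vmrun();
    report(vmcb->control.exit_code == SVM_EXIT_PAUSE, "expected PAUSE vmexit");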
2819 vmcb->save.cr0 = vmcb->save.cr0 & ~(X86_CR0_TS | X86_CR0_EM); in svm_no_nm_test()
2849 vmcb->control.exit_code, vmcb->save.rip, \
2850 vmcb->save.br_from, vmcb->save.br_to, \
2851 vmcb->save.last_excp_from, vmcb->save.last_excp_to)
2942 vmcb->control.virt_ext = 0; in svm_lbrv_test1()
2949 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in svm_lbrv_test1()
2963 vmcb->control.virt_ext = 0; in svm_lbrv_test2()
2972 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in svm_lbrv_test2()
2990 vmcb->control.virt_ext = LBR_CTL_ENABLE_MASK; in svm_lbrv_nested_test1()
2991 vmcb->save.dbgctl = DEBUGCTLMSR_LBR; in svm_lbrv_nested_test1()
2999 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in svm_lbrv_nested_test1()
3004 if (vmcb->save.dbgctl != 0) { in svm_lbrv_nested_test1()
3005 report(false, "unexpected virtual guest MSR_IA32_DEBUGCTLMSR value 0x%lx", vmcb->save.dbgctl); in svm_lbrv_nested_test1()
3022 vmcb->control.virt_ext = LBR_CTL_ENABLE_MASK; in svm_lbrv_nested_test2()
3024 vmcb->save.dbgctl = 0; in svm_lbrv_nested_test2()
3025 vmcb->save.br_from = (u64)&host_branch2_from; in svm_lbrv_nested_test2()
3026 vmcb->save.br_to = (u64)&host_branch2_to; in svm_lbrv_nested_test2()
3034 if (vmcb->control.exit_code != SVM_EXIT_VMMCALL) { in svm_lbrv_nested_test2()
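The nested LBR tests rely on the LBRV swap: with LBR_CTL_ENABLE_MASK in virt_ext, VMRUN loads DEBUGCTL and the last-branch MSRs from the VMCB save area and #VMEXIT stores the guest's values back, keeping them separate from the host's. In sketch form, using the host_branch2_* labels visible above:

    vmcb->control.virt_ext = LBR_CTL_ENABLE_MASK;   /* enable LBR virtualization */
    vmcb->save.dbgctl = DEBUGCTLMSR_LBR;            /* guest's DEBUGCTL at entry */
    vmcb->save.br_from = (u64)&host_branch2_from;   /* seed guest-visible LBRs */
    vmcb->save.br_to = (u64)&host_branch2_to;
    svm_vmrun();
    /* vmcb->save.dbgctl / br_from / br_to now hold the guest's values. */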
3080 report (vmcb->control.exit_code == expected_vmexit, "Test expected VM exit"); in svm_intr_intercept_mix_run_guest()
3081 report(vmcb->save.rflags & X86_EFLAGS_IF, "Guest should have EFLAGS.IF set now"); in svm_intr_intercept_mix_run_guest()
3100 vmcb->control.intercept |= (1 << INTERCEPT_INTR); in svm_intr_intercept_mix_if()
3101 vmcb->control.int_ctl &= ~V_INTR_MASKING_MASK; in svm_intr_intercept_mix_if()
3102 vmcb->save.rflags &= ~X86_EFLAGS_IF; in svm_intr_intercept_mix_if()
3133 vmcb->control.intercept |= (1 << INTERCEPT_INTR); in svm_intr_intercept_mix_gif()
3134 vmcb->control.int_ctl &= ~V_INTR_MASKING_MASK; in svm_intr_intercept_mix_gif()
3135 vmcb->save.rflags &= ~X86_EFLAGS_IF; in svm_intr_intercept_mix_gif()
3163 vmcb->control.intercept |= (1 << INTERCEPT_INTR); in svm_intr_intercept_mix_gif2()
3164 vmcb->control.int_ctl &= ~V_INTR_MASKING_MASK; in svm_intr_intercept_mix_gif2()
3165 vmcb->save.rflags |= X86_EFLAGS_IF; in svm_intr_intercept_mix_gif2()
3192 vmcb->control.intercept |= (1 << INTERCEPT_NMI); in svm_intr_intercept_mix_nmi()
3193 vmcb->control.int_ctl &= ~V_INTR_MASKING_MASK; in svm_intr_intercept_mix_nmi()
3194 vmcb->save.rflags |= X86_EFLAGS_IF; in svm_intr_intercept_mix_nmi()
3216 vmcb->control.intercept |= (1 << INTERCEPT_SMI); in svm_intr_intercept_mix_smi()
3217 vmcb->control.int_ctl &= ~V_INTR_MASKING_MASK; in svm_intr_intercept_mix_smi()
3255 report(regs->vector == svm_exception_test_vector, in svm_exception_handler()
3274 u32 old_ie = vmcb->control.intercept_exceptions; in handle_exception_in_l1()
3276 vmcb->control.intercept_exceptions |= (1ULL << vector); in handle_exception_in_l1()
3281 vmcb->control.intercept_exceptions = old_ie; in handle_exception_in_l1()
3291 test_set_guest((test_guest_func)t->guest_code); in svm_exception_test()
3293 handle_exception_in_l2(t->vector); in svm_exception_test()
3296 handle_exception_in_l1(t->vector); in svm_exception_test()
3311 vmcb->save.idtr.base = (u64)alloc_vpage(); in svm_shutdown_intercept_test()
3312 vmcb->control.intercept |= (1ULL << INTERCEPT_SHUTDOWN); in svm_shutdown_intercept_test()
3314 report(vmcb->control.exit_code == SVM_EXIT_SHUTDOWN, "shutdown test passed"); in svm_shutdown_intercept_test()
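The setup above works because an unmapped IDT makes the guest's very first exception escalate through #DF to a triple fault; with the SHUTDOWN intercept set, the host observes SVM_EXIT_SHUTDOWN instead of a platform reset. The complete recipe, restated with comments:

    vmcb->save.idtr.base = (u64)alloc_vpage();  /* valid VA, no backing page */
    vmcb->control.intercept |= (1ULL << INTERCEPT_SHUTDOWN);
    svm_vmrun();
    report(vmcb->control.exit_code == SVM_EXIT_SHUTDOWN, "shutdown test passed");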