xref: /kvm-unit-tests/x86/vmx_tests.c (revision 4f69a3d4c121b7400e3d21509487600546fdf4c0)
1 #include "vmx.h"
2 #include "msr.h"
3 #include "processor.h"
4 #include "vm.h"
5 
/* Host values of IA32_PAT / IA32_EFER, captured by the MSR-control tests
 * so guest and exit handler can compare against them. */
u64 ia32_pat;
u64 ia32_efer;
/* Current stage of a multi-step test; written by guest code and read by
 * the host-side exit handler (and vice versa), hence volatile. */
volatile u32 stage;
9 
10 static inline void vmcall()
11 {
12 	asm volatile("vmcall");
13 }
14 
15 static inline void set_stage(u32 s)
16 {
17 	barrier();
18 	stage = s;
19 	barrier();
20 }
21 
/* Default init hook: basic tests need no extra VMCS setup.
 * NOTE(review): left un-prototyped on purpose — it is stored in the
 * vmx_tests function-pointer table; confirm the expected pointer type
 * before adding a parameter list. */
void basic_init()
{
}
25 
/* Minimal guest payload for the "null" test: prints a banner and
 * returns.  NOTE(review): the message still says "null_guest_main" —
 * presumably the function's former name; confirm whether any harness
 * matches on this string before changing it. */
void basic_guest_main()
{
	/* Here is a basic guest_main, print Hello World */
	printf("\tHello World, this is null_guest_main!\n");
}
31 
32 int basic_exit_handler()
33 {
34 	u64 guest_rip;
35 	ulong reason;
36 
37 	guest_rip = vmcs_read(GUEST_RIP);
38 	reason = vmcs_read(EXI_REASON) & 0xff;
39 
40 	switch (reason) {
41 	case VMX_VMCALL:
42 		print_vmexit_info();
43 		vmcs_write(GUEST_RIP, guest_rip + 3);
44 		return VMX_TEST_RESUME;
45 	default:
46 		break;
47 	}
48 	printf("ERROR : Unhandled vmx exit.\n");
49 	print_vmexit_info();
50 	return VMX_TEST_EXIT;
51 }
52 
/* Default syscall hook: basic tests ignore guest syscalls. */
void basic_syscall_handler(u64 syscall_no)
{
}
56 
57 void vmenter_main()
58 {
59 	u64 rax;
60 	u64 rsp, resume_rsp;
61 
62 	report("test vmlaunch", 1);
63 
64 	asm volatile(
65 		"mov %%rsp, %0\n\t"
66 		"mov %3, %%rax\n\t"
67 		"vmcall\n\t"
68 		"mov %%rax, %1\n\t"
69 		"mov %%rsp, %2\n\t"
70 		: "=r"(rsp), "=r"(rax), "=r"(resume_rsp)
71 		: "g"(0xABCD));
72 	report("test vmresume", (rax == 0xFFFF) && (rsp == resume_rsp));
73 }
74 
75 int vmenter_exit_handler()
76 {
77 	u64 guest_rip;
78 	ulong reason;
79 
80 	guest_rip = vmcs_read(GUEST_RIP);
81 	reason = vmcs_read(EXI_REASON) & 0xff;
82 	switch (reason) {
83 	case VMX_VMCALL:
84 		if (regs.rax != 0xABCD) {
85 			report("test vmresume", 0);
86 			return VMX_TEST_VMEXIT;
87 		}
88 		regs.rax = 0xFFFF;
89 		vmcs_write(GUEST_RIP, guest_rip + 3);
90 		return VMX_TEST_RESUME;
91 	default:
92 		report("test vmresume", 0);
93 		print_vmexit_info();
94 	}
95 	return VMX_TEST_VMEXIT;
96 }
97 
98 void msr_bmp_init()
99 {
100 	void *msr_bitmap;
101 	u32 ctrl_cpu0;
102 
103 	msr_bitmap = alloc_page();
104 	memset(msr_bitmap, 0x0, PAGE_SIZE);
105 	ctrl_cpu0 = vmcs_read(CPU_EXEC_CTRL0);
106 	ctrl_cpu0 |= CPU_MSR_BITMAP;
107 	vmcs_write(CPU_EXEC_CTRL0, ctrl_cpu0);
108 	vmcs_write(MSR_BITMAP, (u64)msr_bitmap);
109 }
110 
111 static void test_ctrl_pat_init()
112 {
113 	u64 ctrl_ent;
114 	u64 ctrl_exi;
115 
116 	msr_bmp_init();
117 	ctrl_ent = vmcs_read(ENT_CONTROLS);
118 	ctrl_exi = vmcs_read(EXI_CONTROLS);
119 	vmcs_write(ENT_CONTROLS, ctrl_ent | ENT_LOAD_PAT);
120 	vmcs_write(EXI_CONTROLS, ctrl_exi | (EXI_SAVE_PAT | EXI_LOAD_PAT));
121 	ia32_pat = rdmsr(MSR_IA32_CR_PAT);
122 	vmcs_write(GUEST_PAT, 0x0);
123 	vmcs_write(HOST_PAT, ia32_pat);
124 }
125 
126 static void test_ctrl_pat_main()
127 {
128 	u64 guest_ia32_pat;
129 
130 	guest_ia32_pat = rdmsr(MSR_IA32_CR_PAT);
131 	if (!(ctrl_enter_rev.clr & ENT_LOAD_PAT))
132 		printf("\tENT_LOAD_PAT is not supported.\n");
133 	else {
134 		if (guest_ia32_pat != 0) {
135 			report("Entry load PAT", 0);
136 			return;
137 		}
138 	}
139 	wrmsr(MSR_IA32_CR_PAT, 0x6);
140 	vmcall();
141 	guest_ia32_pat = rdmsr(MSR_IA32_CR_PAT);
142 	if (ctrl_enter_rev.clr & ENT_LOAD_PAT) {
143 		if (guest_ia32_pat != ia32_pat) {
144 			report("Entry load PAT", 0);
145 			return;
146 		}
147 		report("Entry load PAT", 1);
148 	}
149 }
150 
151 static int test_ctrl_pat_exit_handler()
152 {
153 	u64 guest_rip;
154 	ulong reason;
155 	u64 guest_pat;
156 
157 	guest_rip = vmcs_read(GUEST_RIP);
158 	reason = vmcs_read(EXI_REASON) & 0xff;
159 	switch (reason) {
160 	case VMX_VMCALL:
161 		guest_pat = vmcs_read(GUEST_PAT);
162 		if (!(ctrl_exit_rev.clr & EXI_SAVE_PAT)) {
163 			printf("\tEXI_SAVE_PAT is not supported\n");
164 			vmcs_write(GUEST_PAT, 0x6);
165 		} else {
166 			if (guest_pat == 0x6)
167 				report("Exit save PAT", 1);
168 			else
169 				report("Exit save PAT", 0);
170 		}
171 		if (!(ctrl_exit_rev.clr & EXI_LOAD_PAT))
172 			printf("\tEXI_LOAD_PAT is not supported\n");
173 		else {
174 			if (rdmsr(MSR_IA32_CR_PAT) == ia32_pat)
175 				report("Exit load PAT", 1);
176 			else
177 				report("Exit load PAT", 0);
178 		}
179 		vmcs_write(GUEST_PAT, ia32_pat);
180 		vmcs_write(GUEST_RIP, guest_rip + 3);
181 		return VMX_TEST_RESUME;
182 	default:
183 		printf("ERROR : Undefined exit reason, reason = %d.\n", reason);
184 		break;
185 	}
186 	return VMX_TEST_VMEXIT;
187 }
188 
189 static void test_ctrl_efer_init()
190 {
191 	u64 ctrl_ent;
192 	u64 ctrl_exi;
193 
194 	msr_bmp_init();
195 	ctrl_ent = vmcs_read(ENT_CONTROLS) | ENT_LOAD_EFER;
196 	ctrl_exi = vmcs_read(EXI_CONTROLS) | EXI_SAVE_EFER | EXI_LOAD_EFER;
197 	vmcs_write(ENT_CONTROLS, ctrl_ent & ctrl_enter_rev.clr);
198 	vmcs_write(EXI_CONTROLS, ctrl_exi & ctrl_exit_rev.clr);
199 	ia32_efer = rdmsr(MSR_EFER);
200 	vmcs_write(GUEST_EFER, ia32_efer ^ EFER_NX);
201 	vmcs_write(HOST_EFER, ia32_efer ^ EFER_NX);
202 }
203 
204 static void test_ctrl_efer_main()
205 {
206 	u64 guest_ia32_efer;
207 
208 	guest_ia32_efer = rdmsr(MSR_EFER);
209 	if (!(ctrl_enter_rev.clr & ENT_LOAD_EFER))
210 		printf("\tENT_LOAD_EFER is not supported.\n");
211 	else {
212 		if (guest_ia32_efer != (ia32_efer ^ EFER_NX)) {
213 			report("Entry load EFER", 0);
214 			return;
215 		}
216 	}
217 	wrmsr(MSR_EFER, ia32_efer);
218 	vmcall();
219 	guest_ia32_efer = rdmsr(MSR_EFER);
220 	if (ctrl_enter_rev.clr & ENT_LOAD_EFER) {
221 		if (guest_ia32_efer != ia32_efer) {
222 			report("Entry load EFER", 0);
223 			return;
224 		}
225 		report("Entry load EFER", 1);
226 	}
227 }
228 
229 static int test_ctrl_efer_exit_handler()
230 {
231 	u64 guest_rip;
232 	ulong reason;
233 	u64 guest_efer;
234 
235 	guest_rip = vmcs_read(GUEST_RIP);
236 	reason = vmcs_read(EXI_REASON) & 0xff;
237 	switch (reason) {
238 	case VMX_VMCALL:
239 		guest_efer = vmcs_read(GUEST_EFER);
240 		if (!(ctrl_exit_rev.clr & EXI_SAVE_EFER)) {
241 			printf("\tEXI_SAVE_EFER is not supported\n");
242 			vmcs_write(GUEST_EFER, ia32_efer);
243 		} else {
244 			if (guest_efer == ia32_efer)
245 				report("Exit save EFER", 1);
246 			else
247 				report("Exit save EFER", 0);
248 		}
249 		if (!(ctrl_exit_rev.clr & EXI_LOAD_EFER)) {
250 			printf("\tEXI_LOAD_EFER is not supported\n");
251 			wrmsr(MSR_EFER, ia32_efer ^ EFER_NX);
252 		} else {
253 			if (rdmsr(MSR_EFER) == (ia32_efer ^ EFER_NX))
254 				report("Exit load EFER", 1);
255 			else
256 				report("Exit load EFER", 0);
257 		}
258 		vmcs_write(GUEST_PAT, ia32_efer);
259 		vmcs_write(GUEST_RIP, guest_rip + 3);
260 		return VMX_TEST_RESUME;
261 	default:
262 		printf("ERROR : Undefined exit reason, reason = %d.\n", reason);
263 		break;
264 	}
265 	return VMX_TEST_VMEXIT;
266 }
267 
268 u32 guest_cr0, guest_cr4;
269 
270 static void cr_shadowing_main()
271 {
272 	u32 cr0, cr4, tmp;
273 
274 	// Test read through
275 	set_stage(0);
276 	guest_cr0 = read_cr0();
277 	if (stage == 1)
278 		report("Read through CR0", 0);
279 	else
280 		vmcall();
281 	set_stage(1);
282 	guest_cr4 = read_cr4();
283 	if (stage == 2)
284 		report("Read through CR4", 0);
285 	else
286 		vmcall();
287 	// Test write through
288 	guest_cr0 = guest_cr0 ^ (X86_CR0_TS | X86_CR0_MP);
289 	guest_cr4 = guest_cr4 ^ (X86_CR4_TSD | X86_CR4_DE);
290 	set_stage(2);
291 	write_cr0(guest_cr0);
292 	if (stage == 3)
293 		report("Write throuth CR0", 0);
294 	else
295 		vmcall();
296 	set_stage(3);
297 	write_cr4(guest_cr4);
298 	if (stage == 4)
299 		report("Write through CR4", 0);
300 	else
301 		vmcall();
302 	// Test read shadow
303 	set_stage(4);
304 	vmcall();
305 	cr0 = read_cr0();
306 	if (stage != 5) {
307 		if (cr0 == guest_cr0)
308 			report("Read shadowing CR0", 1);
309 		else
310 			report("Read shadowing CR0", 0);
311 	}
312 	set_stage(5);
313 	cr4 = read_cr4();
314 	if (stage != 6) {
315 		if (cr4 == guest_cr4)
316 			report("Read shadowing CR4", 1);
317 		else
318 			report("Read shadowing CR4", 0);
319 	}
320 	// Test write shadow (same value with shadow)
321 	set_stage(6);
322 	write_cr0(guest_cr0);
323 	if (stage == 7)
324 		report("Write shadowing CR0 (same value with shadow)", 0);
325 	else
326 		vmcall();
327 	set_stage(7);
328 	write_cr4(guest_cr4);
329 	if (stage == 8)
330 		report("Write shadowing CR4 (same value with shadow)", 0);
331 	else
332 		vmcall();
333 	// Test write shadow (different value)
334 	set_stage(8);
335 	tmp = guest_cr0 ^ X86_CR0_TS;
336 	asm volatile("mov %0, %%rsi\n\t"
337 		"mov %%rsi, %%cr0\n\t"
338 		::"m"(tmp)
339 		:"rsi", "memory", "cc");
340 	if (stage != 9)
341 		report("Write shadowing different X86_CR0_TS", 0);
342 	else
343 		report("Write shadowing different X86_CR0_TS", 1);
344 	set_stage(9);
345 	tmp = guest_cr0 ^ X86_CR0_MP;
346 	asm volatile("mov %0, %%rsi\n\t"
347 		"mov %%rsi, %%cr0\n\t"
348 		::"m"(tmp)
349 		:"rsi", "memory", "cc");
350 	if (stage != 10)
351 		report("Write shadowing different X86_CR0_MP", 0);
352 	else
353 		report("Write shadowing different X86_CR0_MP", 1);
354 	set_stage(10);
355 	tmp = guest_cr4 ^ X86_CR4_TSD;
356 	asm volatile("mov %0, %%rsi\n\t"
357 		"mov %%rsi, %%cr4\n\t"
358 		::"m"(tmp)
359 		:"rsi", "memory", "cc");
360 	if (stage != 11)
361 		report("Write shadowing different X86_CR4_TSD", 0);
362 	else
363 		report("Write shadowing different X86_CR4_TSD", 1);
364 	set_stage(11);
365 	tmp = guest_cr4 ^ X86_CR4_DE;
366 	asm volatile("mov %0, %%rsi\n\t"
367 		"mov %%rsi, %%cr4\n\t"
368 		::"m"(tmp)
369 		:"rsi", "memory", "cc");
370 	if (stage != 12)
371 		report("Write shadowing different X86_CR4_DE", 0);
372 	else
373 		report("Write shadowing different X86_CR4_DE", 1);
374 }
375 
376 static int cr_shadowing_exit_handler()
377 {
378 	u64 guest_rip;
379 	ulong reason;
380 	u32 insn_len;
381 	u32 exit_qual;
382 
383 	guest_rip = vmcs_read(GUEST_RIP);
384 	reason = vmcs_read(EXI_REASON) & 0xff;
385 	insn_len = vmcs_read(EXI_INST_LEN);
386 	exit_qual = vmcs_read(EXI_QUALIFICATION);
387 	switch (reason) {
388 	case VMX_VMCALL:
389 		switch (stage) {
390 		case 0:
391 			if (guest_cr0 == vmcs_read(GUEST_CR0))
392 				report("Read through CR0", 1);
393 			else
394 				report("Read through CR0", 0);
395 			break;
396 		case 1:
397 			if (guest_cr4 == vmcs_read(GUEST_CR4))
398 				report("Read through CR4", 1);
399 			else
400 				report("Read through CR4", 0);
401 			break;
402 		case 2:
403 			if (guest_cr0 == vmcs_read(GUEST_CR0))
404 				report("Write through CR0", 1);
405 			else
406 				report("Write through CR0", 0);
407 			break;
408 		case 3:
409 			if (guest_cr4 == vmcs_read(GUEST_CR4))
410 				report("Write through CR4", 1);
411 			else
412 				report("Write through CR4", 0);
413 			break;
414 		case 4:
415 			guest_cr0 = vmcs_read(GUEST_CR0) ^ (X86_CR0_TS | X86_CR0_MP);
416 			guest_cr4 = vmcs_read(GUEST_CR4) ^ (X86_CR4_TSD | X86_CR4_DE);
417 			vmcs_write(CR0_MASK, X86_CR0_TS | X86_CR0_MP);
418 			vmcs_write(CR0_READ_SHADOW, guest_cr0 & (X86_CR0_TS | X86_CR0_MP));
419 			vmcs_write(CR4_MASK, X86_CR4_TSD | X86_CR4_DE);
420 			vmcs_write(CR4_READ_SHADOW, guest_cr4 & (X86_CR4_TSD | X86_CR4_DE));
421 			break;
422 		case 6:
423 			if (guest_cr0 == (vmcs_read(GUEST_CR0) ^ (X86_CR0_TS | X86_CR0_MP)))
424 				report("Write shadowing CR0 (same value)", 1);
425 			else
426 				report("Write shadowing CR0 (same value)", 0);
427 			break;
428 		case 7:
429 			if (guest_cr4 == (vmcs_read(GUEST_CR4) ^ (X86_CR4_TSD | X86_CR4_DE)))
430 				report("Write shadowing CR4 (same value)", 1);
431 			else
432 				report("Write shadowing CR4 (same value)", 0);
433 			break;
434 		}
435 		vmcs_write(GUEST_RIP, guest_rip + insn_len);
436 		return VMX_TEST_RESUME;
437 	case VMX_CR:
438 		switch (stage) {
439 		case 4:
440 			report("Read shadowing CR0", 0);
441 			set_stage(stage + 1);
442 			break;
443 		case 5:
444 			report("Read shadowing CR4", 0);
445 			set_stage(stage + 1);
446 			break;
447 		case 6:
448 			report("Write shadowing CR0 (same value)", 0);
449 			set_stage(stage + 1);
450 			break;
451 		case 7:
452 			report("Write shadowing CR4 (same value)", 0);
453 			set_stage(stage + 1);
454 			break;
455 		case 8:
456 		case 9:
457 			// 0x600 encodes "mov %esi, %cr0"
458 			if (exit_qual == 0x600)
459 				set_stage(stage + 1);
460 			break;
461 		case 10:
462 		case 11:
463 			// 0x604 encodes "mov %esi, %cr4"
464 			if (exit_qual == 0x604)
465 				set_stage(stage + 1);
466 			break;
467 		}
468 		vmcs_write(GUEST_RIP, guest_rip + insn_len);
469 		return VMX_TEST_RESUME;
470 	default:
471 		printf("Unknown exit reason, %d\n", reason);
472 		print_vmexit_info();
473 	}
474 	return VMX_TEST_VMEXIT;
475 }
476 
/* name/init/guest_main/exit_handler/syscall_handler/guest_regs
   basic_* just implement some basic functions */
struct vmx_test vmx_tests[] = {
	{ "null", basic_init, basic_guest_main, basic_exit_handler,
		basic_syscall_handler, {0} },
	{ "vmenter", basic_init, vmenter_main, vmenter_exit_handler,
		basic_syscall_handler, {0} },
	{ "control field PAT", test_ctrl_pat_init, test_ctrl_pat_main,
		test_ctrl_pat_exit_handler, basic_syscall_handler, {0} },
	{ "control field EFER", test_ctrl_efer_init, test_ctrl_efer_main,
		test_ctrl_efer_exit_handler, basic_syscall_handler, {0} },
	{ "CR shadowing", basic_init, cr_shadowing_main,
		cr_shadowing_exit_handler, basic_syscall_handler, {0} },
	/* Sentinel: a NULL name terminates the table. */
	{ NULL, NULL, NULL, NULL, NULL, {0} },
};
492