/* kvm-unit-tests: x86/emulator64.c (revision 1d0f08f40d53daa39566842ec46a112db5f7e524) */
#define MAGIC_NUM 0xdeadbeefdeadbeefUL
#define GS_BASE 0x400000

static unsigned long rip_advance;

static void advance_rip_and_note_exception(struct ex_regs *regs)
{
	++exceptions;
	regs->rip += rip_advance;
}

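/*
 * Round-trip a value through CR8 (the 64-bit alias of the local APIC TPR)
 * and verify it reads back unchanged.
 */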
static void test_cr8(void)
{
	unsigned long src, dst;

	dst = 777;
	src = 3;
	asm volatile("mov %[src], %%cr8; mov %%cr8, %[dst]"
		     : [dst]"+r"(dst), [src]"+r"(src));
	report(dst == 3 && src == 3, "mov %%cr8");
}

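/*
 * Exercise 64-bit PUSH with imm8, register, memory, and imm32 operands on a
 * scratch stack, then check what landed there.
 */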
static void test_push(void *mem)
{
	unsigned long tmp;
	unsigned long *stack_top = mem + 4096;
	unsigned long *new_stack_top;
	unsigned long memw = 0x123456789abcdeful;

	memset(mem, 0x55, (void *)stack_top - mem);

	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq $-7 \n\t"
		     "pushq %[reg] \n\t"
		     "pushq (%[mem]) \n\t"
		     "pushq $-7070707 \n\t"
		     "mov %%rsp, %[new_stack_top] \n\t"
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [new_stack_top]"=r"(new_stack_top)
		     : [stack_top]"r"(stack_top),
		       [reg]"r"(-17l), [mem]"r"(&memw)
		     : "memory");

	report(stack_top[-1] == -7ul, "push $imm8");
	report(stack_top[-2] == -17ul, "push %%reg");
	report(stack_top[-3] == 0x123456789abcdeful, "push mem");
	report(stack_top[-4] == -7070707, "push $imm");
}

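/*
 * The consuming side: POP to memory and to a register, RET through a pushed
 * return address, and LEAVE/ENTER frame handling.
 */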
static void test_pop(void *mem)
{
	unsigned long tmp, tmp3, rsp, rbp;
	unsigned long *stack_top = mem + 4096;
	unsigned long memw = 0x123456789abcdeful;
	static unsigned long tmp2;

	memset(mem, 0x55, (void *)stack_top - mem);

	asm volatile("pushq %[val] \n\t"
		     "popq (%[mem])"
		     : : [val]"m"(memw), [mem]"r"(mem) : "memory");
	report(*(unsigned long *)mem == memw, "pop mem");

	memw = 7 - memw;
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq %[val] \n\t"
		     "popq %[tmp2] \n\t"
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [tmp2]"=m"(tmp2)
		     : [val]"r"(memw), [stack_top]"r"(stack_top)
		     : "memory");
	report(tmp2 == memw, "pop mem (2)");

	memw = 129443 - memw;
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq %[val] \n\t"
		     "popq %[tmp2] \n\t"
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [tmp2]"=r"(tmp2)
		     : [val]"r"(memw), [stack_top]"r"(stack_top)
		     : "memory");
	report(tmp2 == memw, "pop reg");

	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "lea 1f(%%rip), %%rax \n\t"
		     "push %%rax \n\t"
		     "ret \n\t"
		     "2: jmp 2b \n\t"
		     "1: mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp) : [stack_top]"r"(stack_top)
		     : "memory", "rax");
	report_pass("ret");

	/*
	 * LEAVE: rsp <- rbp, then pop rbp.  With rbp pointing at the slot
	 * holding 0x778899, rsp must end up at stack_top and rbp must hold
	 * the popped value.
	 */
	stack_top[-1] = 0x778899;
	asm volatile("mov %[stack_top], %%r8 \n\t"
		     "mov %%rsp, %%r9 \n\t"
		     "xchg %%rbp, %%r8 \n\t"
		     "leave \n\t"
		     "xchg %%rsp, %%r9 \n\t"
		     "xchg %%rbp, %%r8 \n\t"
		     "mov %%r9, %[tmp] \n\t"
		     "mov %%r8, %[tmp3]"
		     : [tmp]"=&r"(tmp), [tmp3]"=&r"(tmp3) : [stack_top]"r"(stack_top-1)
		     : "memory", "r8", "r9");
	report(tmp == (ulong)stack_top && tmp3 == 0x778899, "leave");

	/* ENTER $0x1238, $0: push rbp, rbp <- rsp, rsp -= 0x1238. */
	rbp = 0xaa55aa55bb66bb66ULL;
	rsp = (unsigned long)stack_top;
	asm volatile("mov %[rsp], %%r8 \n\t"
		     "mov %[rbp], %%r9 \n\t"
		     "xchg %%rsp, %%r8 \n\t"
		     "xchg %%rbp, %%r9 \n\t"
		     "enter $0x1238, $0 \n\t"
		     "xchg %%rsp, %%r8 \n\t"
		     "xchg %%rbp, %%r9 \n\t"
		     "xchg %%r8, %[rsp] \n\t"
		     "xchg %%r9, %[rbp]"
		     : [rsp]"+a"(rsp), [rbp]"+b"(rbp) : : "memory", "r8", "r9");
	report(rsp == (unsigned long)stack_top - 8 - 0x1238
	       && rbp == (unsigned long)stack_top - 8
	       && stack_top[-1] == 0xaa55aa55bb66bb66ULL,
	       "enter");
}

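/*
 * Build an m16:64 far pointer in memory ({ 8-byte target, 2-byte CS }) and
 * take an indirect far jump through it; falling through to "res = 0" would
 * mean the jump was not taken.
 */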
static void test_ljmp(void *mem)
{
	unsigned char *m = mem;
	volatile int res = 1;

	*(unsigned long**)m = &&jmpf;
	/* store the 2-byte CS selector after the 8-byte target */
	asm volatile ("data16 mov %%cs, %0":"=m"(*(m + sizeof(unsigned long))));
	asm volatile ("rex64 ljmp *%0"::"m"(*m));
	res = 0;
jmpf:
	report(res, "ljmp");
}

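/*
 * XCHG between RAX and memory at all four operand sizes; the 32-bit case
 * also checks that the register write zero-extends into RAX.
 */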
static void test_xchg(void *mem)
{
	unsigned long *memq = mem;
	unsigned long rax;

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%al, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba98765432ef && *memq == 0x123456789abcd10,
	       "xchg reg, r/m (1)");

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%ax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba987654cdef && *memq == 0x123456789ab3210,
	       "xchg reg, r/m (2)");

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%eax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x89abcdef && *memq == 0x123456776543210,
	       "xchg reg, r/m (3)");

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%rax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x123456789abcdef && *memq == 0xfedcba9876543210,
	       "xchg reg, r/m (4)");
}

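/*
 * XADD at all four operand sizes: RAX receives the old memory value while
 * memory receives the sum.
 */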
static void test_xadd(void *mem)
{
	unsigned long *memq = mem;
	unsigned long rax;

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%al, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba98765432ef && *memq == 0x123456789abcdff,
	       "xadd reg, r/m (1)");

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%ax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba987654cdef && *memq == 0x123456789abffff,
	       "xadd reg, r/m (2)");

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%eax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x89abcdef && *memq == 0x1234567ffffffff,
	       "xadd reg, r/m (3)");

	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%rax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x123456789abcdef && *memq == 0xffffffffffffffff,
	       "xadd reg, r/m (4)");
}

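/*
 * DIV by zero must fault (caught via ASM_TRY) and leave RAX:RDX untouched;
 * then check the widening MUL results for each operand size.
 */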
static void test_muldiv(long *mem)
{
	long a, d, aa, dd;
	u8 ex = 1;

	*mem = 0; a = 1; d = 2;
	asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:"
	     : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem));
	report(a == 1 && d == 2 && ex, "divq (fault)");

	*mem = 987654321098765UL; a = 123456789012345UL; d = 123456789012345UL;
	asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:"
	     : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem));
	report(a == 0x1ffffffb1b963b33ul && d == 0x273ba4384ede2ul && !ex, "divq (1)");

	aa = 0x1111111111111111; dd = 0x2222222222222222;
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mulb %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report(a == 0x1111111111110363 && d == dd, "mulb mem");
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mulw %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report(a == 0x111111111111c963 && d == 0x2222222222220369, "mulw mem");
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mull %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report(a == 0x962fc963 && d == 0x369d036, "mull mem");
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mulq %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report(a == 0x2fc962fc962fc963 && d == 0x369d0369d0369d0, "mulq mem");
}

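/*
 * Basic MMX movq in both directions between an %mm register and memory.
 */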
static void test_mmx(uint64_t *mem)
{
	uint64_t v;

	write_cr0(read_cr0() & ~6); /* clear CR0.MP and CR0.EM */
	asm volatile("fninit");
	v = 0x0102030405060708ULL;
	asm("movq %1, %0" : "=m"(*mem) : "y"(v));
	report(v == *mem, "movq (mmx, read)");
	*mem = 0x8070605040302010ull;
	asm("movq %1, %0" : "=y"(v) : "m"(*mem));
	report(v == *mem, "movq (mmx, write)");
}

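/*
 * Hand-assemble "movb $1, disp32(%rip)" into executable RAM with the
 * displacement pointing at mem+2, call it, and check where the byte landed.
 */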
static void test_rip_relative(unsigned *mem, char *insn_ram)
{
	/* movb $1, mem+2(%rip) */
	insn_ram[0] = 0xc6;	/* opcode: movb $imm8, r/m8 */
	insn_ram[1] = 0x05;	/* ModRM: disp32(%rip) */
	*(unsigned *)&insn_ram[2] = 2 + (char *)mem - (insn_ram + 7);
	insn_ram[6] = 0x01;	/* imm8 */
	/* ret */
	insn_ram[7] = 0xc3;

	*mem = 0;
	asm("callq *%1" : "+m"(*mem) : "r"(insn_ram));
	report(*mem == 0x10000, "movb $imm, 0(%%rip)");
}

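/*
 * CMOVNE with a false condition leaves EAX's value unchanged, but the 32-bit
 * destination write must still zero-extend the upper half of RAX.
 */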
static void test_cmov(u32 *mem)
{
	u64 val;
	*mem = 0xabcdef12u;
	asm ("movq $0x1234567812345678, %%rax\n\t"
	     "cmpl %%eax, %%eax\n\t"
	     "cmovnel (%[mem]), %%eax\n\t"
	     "movq %%rax, %[val]\n\t"
	     : [val]"=r"(val) : [mem]"r"(mem) : "%rax", "cc");
	report(val == 0x12345678ul, "cmovnel");
}

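/*
 * Unmask all x87 exceptions, queue a pending fault via 0.0/0.0, then check
 * that an emulated MMX movq store delivers exactly one #MF.
 */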
static void test_mmx_movq_mf(uint64_t *mem)
{
	/* movq %mm0, (%rax) */
	extern char movq_start, movq_end;
	handler old;

	uint16_t fcw = 0;  /* all exceptions unmasked */
	write_cr0(read_cr0() & ~6);  /* clear CR0.MP and CR0.EM */
	exceptions = 0;
	old = handle_exception(MF_VECTOR, advance_rip_and_note_exception);
	asm volatile("fninit; fldcw %0" : : "m"(fcw));
	asm volatile("fldz; fldz; fdivp"); /* generate exception */

	rip_advance = &movq_end - &movq_start;
	asm(KVM_FEP "movq_start: movq %mm0, (%rax); movq_end:");
	/* exit MMX mode */
	asm volatile("fnclex; emms");
	report(exceptions == 1, "movq mmx generates #MF");
	handle_exception(MF_VECTOR, old);
}

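/*
 * An indirect jump to a non-canonical address must raise #GP; the handler
 * advances RIP past the faulting jmp.
 */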
static void test_jmp_noncanonical(uint64_t *mem)
{
	extern char nc_jmp_start, nc_jmp_end;
	handler old;

	*mem = 0x1111111111111111ul;

	exceptions = 0;
	rip_advance = &nc_jmp_end - &nc_jmp_start;
	old = handle_exception(GP_VECTOR, advance_rip_and_note_exception);
	asm volatile ("nc_jmp_start: jmp *%0; nc_jmp_end:" : : "m"(*mem));
	report(exceptions == 1, "jump to non-canonical address");
	handle_exception(GP_VECTOR, old);
}

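/*
 * 64-bit immediate move (movabs) through the forced-emulation path.
 */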
static void test_movabs(uint64_t *mem)
{
	/* mov $0x9090909090909090, %rcx */
	unsigned long rcx;
	asm(KVM_FEP "mov $0x9090909090909090, %0" : "=c" (rcx) : "0" (0));
	report(rcx == 0x9090909090909090, "64-bit mov imm2");
}

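/*
 * Loading a DPL=0 data segment at CPL=3 must fault; run the forced-emulation
 * load in user mode and expect #GP.
 */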
static void load_dpl0_seg(void)
{
	asm volatile(KVM_FEP "mov %0, %%fs" :: "r" (KERNEL_CS)); /* RPL=0 */
}

static void test_user_load_dpl0_seg(void)
{
	bool raised_vector;

	run_in_user((usermode_func)load_dpl0_seg, GP_VECTOR, 0, 0, 0, 0,
		    &raised_vector);

	report(raised_vector, "Wanted #GP on CPL=3 DPL=0 segment load");
}

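/*
 * A pushw/popw pair must adjust RSP by 2 each way, leaving it balanced.
 */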
static void test_push16(uint64_t *mem)
{
	uint64_t rsp1, rsp2;
	uint16_t r;

	asm volatile("movq %%rsp, %[rsp1]\n\t"
		     "pushw %[v]\n\t"
		     "popw %[r]\n\t"
		     "movq %%rsp, %[rsp2]\n\t"
		     "movq %[rsp1], %%rsp\n\t"
		     : [rsp1]"=r"(rsp1), [rsp2]"=r"(rsp2), [r]"=r"(r)
		     : [v]"m"(*mem) : "memory");
	report(rsp1 == rsp2, "push16");
}

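/*
 * #GP fixup for test_sreg: resume execution at ss_bad_rpl_cont after the
 * faulting SS load.
 */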
static void ss_bad_rpl(struct ex_regs *regs)
{
	extern char ss_bad_rpl_cont;

	++exceptions;
	regs->rip = (ulong)&ss_bad_rpl_cont;
}

static void test_sreg(volatile uint16_t *mem)
{
	u16 ss = read_ss();
	handler old;

	// check that a null selector can be loaded into SS
	*mem = 0;
	asm volatile("mov %0, %%ss" : : "m"(*mem));
	report(read_ss() == 0, "mov null, %%ss");

	// check for #GP when ss.rpl != cpl on a null segment load
	exceptions = 0;
	old = handle_exception(GP_VECTOR, ss_bad_rpl);
	*mem = 3;
	asm volatile("mov %0, %%ss; ss_bad_rpl_cont:" : : "m"(*mem));
	report(exceptions == 1 && read_ss() == 0,
	       "mov null, %%ss (with ss.rpl != cpl)");
	handle_exception(GP_VECTOR, old);
	write_ss(ss);
}

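/*
 * User-mode helper for test_iret: read "dummy" through the GS segment base
 * set up by the kernel side.
 */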
static uint64_t usr_gs_mov(void)
{
	static uint64_t dummy = MAGIC_NUM;
	uint64_t dummy_ptr = (uint64_t)&dummy;
	uint64_t ret;

	dummy_ptr -= GS_BASE;
	asm volatile("mov %%gs:(%%rcx), %%rax" : "=a"(ret) : "c"(dummy_ptr));

	return ret;
}

static void test_iret(void)
{
	uint64_t val;
	bool raised_vector;

	/* Update GS base to 4MiB */
	wrmsr(MSR_GS_BASE, GS_BASE);

	/*
	 * Per the SDM, when IRET returns to an outer privilege level, any of
	 * the data segment registers (ES, FS, GS, and DS) whose DPL is lower
	 * than the new CPL is loaded with a null selector.
	 *
	 * In this test, GS becomes null; in 64-bit mode the access below
	 * still uses the base from MSR_GS_BASE, so the read succeeds.
	 */
	val = run_in_user((usermode_func)usr_gs_mov, GP_VECTOR,
			0, 0, 0, 0, &raised_vector);

	report(val == MAGIC_NUM, "Test ret/iret with a nullified segment");
}

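/*
 * Entry point: map a page of executable RAM for the hand-assembled tests,
 * then run the 64-bit-only emulator tests.  Tests that rely on forced
 * emulation are gated on FEP availability.
 */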
static void test_emulator_64(void *mem)
{
	void *insn_page = alloc_page();
	void *insn_ram  = vmap(virt_to_phys(insn_page), 4096);

	test_push(mem);
	test_pop(mem);

	test_xchg(mem);
	test_xadd(mem);

	test_cr8();

	test_ljmp(mem);
	test_muldiv(mem);
	test_mmx(mem);
	test_rip_relative(mem, insn_ram);
	test_iret();
	test_sreg(mem);
	test_cmov(mem);

	if (is_fep_available()) {
		test_mmx_movq_mf(mem);
		test_movabs(mem);
		test_user_load_dpl0_seg();
	}

	test_push16(mem);

	test_jmp_noncanonical(mem);
}
481