xref: /kvm-unit-tests/x86/emulator64.c (revision cd5f2fb4ad641c51fe0f1a85264dc3f6ede6e131)
1 #define MAGIC_NUM 0xdeadbeefdeadbeefUL
2 #define GS_BASE 0x400000
3 
/* Write 3 to CR8 (TPR) and verify the same value reads back. */
static void test_cr8(void)
{
	unsigned long src, dst;

	dst = 777;		/* poison value, overwritten by the read-back */
	src = 3;
	asm volatile("mov %[src], %%cr8; mov %%cr8, %[dst]"
		     : [dst]"+r"(dst), [src]"+r"(src));
	report(dst == 3 && src == 3, "mov %%cr8");
}
14 
/*
 * Exercise the four encodings of PUSH (imm8, reg, mem, imm32) on a
 * private stack carved out of the 4KiB scratch page @mem, then check
 * the pushed values in memory.
 */
static void test_push(void *mem)
{
	unsigned long tmp;
	unsigned long *stack_top = mem + 4096;
	unsigned long *new_stack_top;
	unsigned long memw = 0x123456789abcdeful;

	/* Fill the scratch stack with a known pattern. */
	memset(mem, 0x55, (void *)stack_top - mem);

	/* Switch RSP to the scratch stack, push four values, restore RSP. */
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq $-7 \n\t"
		     "pushq %[reg] \n\t"
		     "pushq (%[mem]) \n\t"
		     "pushq $-7070707 \n\t"
		     "mov %%rsp, %[new_stack_top] \n\t"
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [new_stack_top]"=r"(new_stack_top)
		     : [stack_top]"r"(stack_top),
		       [reg]"r"(-17l), [mem]"r"(&memw)
		     : "memory");

	/* The stack grows down: stack_top[-1] is the first value pushed. */
	report(stack_top[-1] == -7ul, "push $imm8");
	report(stack_top[-2] == -17ul, "push %%reg");
	report(stack_top[-3] == 0x123456789abcdeful, "push mem");
	report(stack_top[-4] == -7070707, "push $imm");
}
42 
/*
 * Exercise POP and its stack-frame relatives (pop to memory, pop to a
 * register, RET, LEAVE, ENTER) using a private stack carved out of the
 * 4KiB scratch page @mem.
 */
static void test_pop(void *mem)
{
	unsigned long tmp, tmp3, rsp, rbp;
	unsigned long *stack_top = mem + 4096;
	unsigned long memw = 0x123456789abcdeful;
	static unsigned long tmp2;

	memset(mem, 0x55, (void *)stack_top - mem);

	/* POP directly into memory, on the current (real) stack. */
	asm volatile("pushq %[val] \n\t"
		     "popq (%[mem])"
		     : : [val]"m"(memw), [mem]"r"(mem) : "memory");
	report(*(unsigned long *)mem == memw, "pop mem");

	memw = 7 - memw;
	/* POP into memory again ("=m" operand), on the scratch stack. */
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq %[val] \n\t"
		     "popq %[tmp2] \n\t"
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [tmp2]"=m"(tmp2)
		     : [val]"r"(memw), [stack_top]"r"(stack_top)
		     : "memory");
	report(tmp2 == memw, "pop mem (2)");

	memw = 129443 - memw;
	/* Same sequence, but POP into a register ("=r" operand). */
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "pushq %[val] \n\t"
		     "popq %[tmp2] \n\t"
		     "mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp), [tmp2]"=r"(tmp2)
		     : [val]"r"(memw), [stack_top]"r"(stack_top)
		     : "memory");
	report(tmp2 == memw, "pop reg");

	/*
	 * PUSH the address of label "1" and RET to it.  If RET lands
	 * anywhere else, the fall-through "2: jmp 2b" hangs the test
	 * rather than silently passing.
	 */
	asm volatile("mov %%rsp, %[tmp] \n\t"
		     "mov %[stack_top], %%rsp \n\t"
		     "lea 1f(%%rip), %%rax \n\t"
		     "push %%rax \n\t"
		     "ret \n\t"
		     "2: jmp 2b \n\t"
		     "1: mov %[tmp], %%rsp"
		     : [tmp]"=&r"(tmp) : [stack_top]"r"(stack_top)
		     : "memory", "rax");
	report_pass("ret");

	/*
	 * LEAVE (RSP <- RBP; pop RBP): point RBP at stack_top[-1], which
	 * holds 0x778899.  Afterwards RSP (saved in r9/tmp) should be
	 * stack_top and RBP (saved in r8/tmp3) should hold 0x778899.
	 * The xchg pairs save and restore the real RSP/RBP around it.
	 */
	stack_top[-1] = 0x778899;
	asm volatile("mov %[stack_top], %%r8 \n\t"
		     "mov %%rsp, %%r9 \n\t"
		     "xchg %%rbp, %%r8 \n\t"
		     "leave \n\t"
		     "xchg %%rsp, %%r9 \n\t"
		     "xchg %%rbp, %%r8 \n\t"
		     "mov %%r9, %[tmp] \n\t"
		     "mov %%r8, %[tmp3]"
		     : [tmp]"=&r"(tmp), [tmp3]"=&r"(tmp3) : [stack_top]"r"(stack_top-1)
		     : "memory", "r8", "r9");
	report(tmp == (ulong)stack_top && tmp3 == 0x778899, "leave");

	/*
	 * ENTER $0x1238, $0 (push RBP; RBP <- RSP; RSP -= 0x1238), run on
	 * the scratch stack with the real RSP/RBP shuffled through r8/r9.
	 * Expect the old RBP value at stack_top[-1] and the documented
	 * RSP/RBP arithmetic.
	 */
	rbp = 0xaa55aa55bb66bb66ULL;
	rsp = (unsigned long)stack_top;
	asm volatile("mov %[rsp], %%r8 \n\t"
		     "mov %[rbp], %%r9 \n\t"
		     "xchg %%rsp, %%r8 \n\t"
		     "xchg %%rbp, %%r9 \n\t"
		     "enter $0x1238, $0 \n\t"
		     "xchg %%rsp, %%r8 \n\t"
		     "xchg %%rbp, %%r9 \n\t"
		     "xchg %%r8, %[rsp] \n\t"
		     "xchg %%r9, %[rbp]"
		     : [rsp]"+a"(rsp), [rbp]"+b"(rbp) : : "memory", "r8", "r9");
	report(rsp == (unsigned long)stack_top - 8 - 0x1238
	       && rbp == (unsigned long)stack_top - 8
	       && stack_top[-1] == 0xaa55aa55bb66bb66ULL,
	       "enter");
}
120 
/*
 * Build a 10-byte far pointer in @mem (64-bit offset of label jmpf,
 * followed by the current CS selector) and take a 64-bit indirect far
 * jump through it.  If the jump works, the "res = 0" line is skipped
 * and res stays 1.
 */
static void test_ljmp(void *mem)
{
	unsigned char *m = mem;
	volatile int res = 1;

	/* Offset part of the far pointer: address of the landing label. */
	*(unsigned long**)m = &&jmpf;
	/* Selector part: store CS as a 16-bit value right after it. */
	asm volatile ("data16 mov %%cs, %0":"=m"(*(m + sizeof(unsigned long))));
	/* rex64 makes ljmp read a 64-bit offset from the m16:64 operand. */
	asm volatile ("rex64 ljmp *%0"::"m"(*m));
	res = 0;
jmpf:
	report(res, "ljmp");
}
133 
/*
 * XCHG between a register and memory at all four operand widths,
 * checking that only the addressed sub-word of each side is swapped
 * (and that the 32-bit form zero-extends RAX).
 */
static void test_xchg(void *mem)
{
	unsigned long *memq = mem;
	unsigned long rax;

	/* 8-bit: swap AL with the low byte of *memq. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%al, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba98765432ef && *memq == 0x123456789abcd10,
	       "xchg reg, r/m (1)");

	/* 16-bit: swap AX with the low word. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%ax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba987654cdef && *memq == 0x123456789ab3210,
	       "xchg reg, r/m (2)");

	/* 32-bit: swap EAX with the low dword; upper RAX is zeroed. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%eax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x89abcdef && *memq == 0x123456776543210,
	       "xchg reg, r/m (3)");

	/* 64-bit: full swap of RAX and *memq. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xchg %%rax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x123456789abcdef && *memq == 0xfedcba9876543210,
	       "xchg reg, r/m (4)");
}
183 
/*
 * XADD between a register and memory at all four operand widths: the
 * register receives the old memory value and memory receives the sum,
 * confined to the addressed sub-word.
 */
static void test_xadd(void *mem)
{
	unsigned long *memq = mem;
	unsigned long rax;

	/* 8-bit: AL <- old low byte, low byte <- 0xef + 0x10 = 0xff. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%al, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba98765432ef && *memq == 0x123456789abcdff,
	       "xadd reg, r/m (1)");

	/* 16-bit form. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%ax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0xfedcba987654cdef && *memq == 0x123456789abffff,
	       "xadd reg, r/m (2)");

	/* 32-bit form: upper RAX is zeroed by the dword write. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%eax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x89abcdef && *memq == 0x1234567ffffffff,
	       "xadd reg, r/m (3)");

	/* 64-bit form: the two summands add to all-ones. */
	asm volatile("mov $0x123456789abcdef, %%rax\n\t"
		     "mov %%rax, (%[memq])\n\t"
		     "mov $0xfedcba9876543210, %%rax\n\t"
		     "xadd %%rax, (%[memq])\n\t"
		     "mov %%rax, %[rax]\n\t"
		     : [rax]"=r"(rax)
		     : [memq]"r"(memq)
		     : "memory", "rax");
	report(rax == 0x123456789abcdef && *memq == 0xffffffffffffffff,
	       "xadd reg, r/m (4)");
}
233 
/*
 * Exercise DIVQ (including the divide-by-zero fault path) and MUL with
 * a memory operand at all four widths.
 */
static void test_muldiv(long *mem)
{
	long a, d, aa, dd;
	u8 ex = 1;	/* cleared only if the div retires without faulting */

	/* Divide by zero: expect a fault, RAX/RDX unchanged, ex still set. */
	*mem = 0; a = 1; d = 2;
	asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:"
	     : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem));
	report(a == 1 && d == 2 && ex, "divq (fault)");

	/* Non-faulting 128/64 divide: quotient in RAX, remainder in RDX. */
	*mem = 987654321098765UL; a = 123456789012345UL; d = 123456789012345UL;
	asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:"
	     : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem));
	report(a == 0x1ffffffb1b963b33ul && d == 0x273ba4384ede2ul && !ex, "divq (1)");

	/* MUL at each width; only the addressed sub-words of RAX/RDX change. */
	aa = 0x1111111111111111; dd = 0x2222222222222222;
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mulb %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report(a == 0x1111111111110363 && d == dd, "mulb mem");
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mulw %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report(a == 0x111111111111c963 && d == 0x2222222222220369, "mulw mem");
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mull %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report(a == 0x962fc963 && d == 0x369d036, "mull mem");
	*mem = 0x3333333333333333; a = aa; d = dd;
	asm("mulq %2" : "+a"(a), "+d"(d) : "m"(*mem));
	report(a == 0x2fc962fc962fc963 && d == 0x369d0369d0369d0, "mulq mem");
}
263 
/* MOVQ between an MMX register and memory, in both directions. */
static void test_mmx(uint64_t *mem)
{
	uint64_t v;

	/* Enable x87/MMX: clear CR0.EM and CR0.TS, then reset the FPU. */
	write_cr0(read_cr0() & ~6); /* EM, TS */
	asm volatile("fninit");
	v = 0x0102030405060708ULL;
	/* "y" constraint forces an MMX register for the source. */
	asm("movq %1, %0" : "=m"(*mem) : "y"(v));
	report(v == *mem, "movq (mmx, read)");
	*mem = 0x8070605040302010ull;
	asm("movq %1, %0" : "=y"(v) : "m"(*mem));
	report(v == *mem, "movq (mmx, write)");
}
277 
/*
 * Hand-assemble a RIP-relative "movb $1, mem+2(%rip)" plus RET into
 * @insn_ram, call it, and verify the byte landed at offset 2 of @mem
 * (little-endian: *mem == 0x10000).
 */
static void test_rip_relative(unsigned *mem, char *insn_ram)
{
	/* movb $1, mem+2(%rip) */
	insn_ram[0] = 0xc6;
	insn_ram[1] = 0x05;
	/* disp32 is relative to the end of the 7-byte instruction. */
	*(unsigned *)&insn_ram[2] = 2 + (char *)mem - (insn_ram + 7);
	insn_ram[6] = 0x01;
	/* ret */
	insn_ram[7] = 0xc3;

	*mem = 0;
	asm("callq *%1" : "+m"(*mem) : "r"(insn_ram));
	report(*mem == 0x10000, "movb $imm, 0(%%rip)");
}
292 
/*
 * CMOVNE with a false condition (CMP %eax,%eax sets ZF) must not read
 * memory, but the 32-bit destination must still be written, zeroing
 * the upper half of RAX — hence val == 0x12345678, not the original
 * 64-bit constant and not *mem.
 */
static void test_cmov(u32 *mem)
{
	u64 val;
	*mem = 0xabcdef12u;
	asm ("movq $0x1234567812345678, %%rax\n\t"
	     "cmpl %%eax, %%eax\n\t"
	     "cmovnel (%[mem]), %%eax\n\t"
	     "movq %%rax, %[val]\n\t"
	     : [val]"=r"(val) : [mem]"r"(mem) : "%rax", "cc");
	report(val == 0x12345678ul, "cmovnel");
}
304 
305 
/*
 * An MMX MOVQ executed while an unmasked x87 exception is pending must
 * raise #MF.  The MOVQ is forced through the instruction emulator via
 * ASM_TRY_FEP, so this exercises the emulator's #MF delivery.
 */
static void test_mmx_movq_mf(uint64_t *mem)
{
	uint16_t fcw = 0;  /* all exceptions unmasked */
	uint64_t val;

	/* Make x87/MMX usable: clear CR0.TS and CR0.EM. */
	write_cr0(read_cr0() & ~(X86_CR0_TS | X86_CR0_EM));
	asm volatile("fninit\n\t"
		     "fldcw %[fcw]\n\t"
		     "fldz\n\t"
		     "fldz\n\t"
		     /* generate exception (0.0 / 0.0) */
		     "fdivp\n\t"
		     /* trigger #MF */
		     ASM_TRY_FEP("1f") "movq %%mm0, %[val]\n\t"
		     /* exit MMX mode */
		     "1: fnclex\n\t"
		     "emms\n\t"
		     : [val]"=m"(val)
		     : [fcw]"m"(fcw));
	report(exception_vector() == MF_VECTOR, "movq mmx generates #MF");
}
327 
/* An indirect JMP to a non-canonical target must raise #GP. */
static void test_jmp_noncanonical(uint64_t *mem)
{
	*mem = NONCANONICAL;
	asm volatile (ASM_TRY("1f") "jmp *%0; 1:" : : "m"(*mem));
	report(exception_vector() == GP_VECTOR,
	       "jump to non-canonical address");
}
335 
/*
 * Memory accesses to non-canonical addresses must fault with a zero
 * error code: #GP for plain addressing, #SS when the effective address
 * is RSP- or RBP-based.
 */
static void test_reg_noncanonical(void)
{
	/* RAX based, should #GP(0) */
	asm volatile(ASM_TRY("1f") "orq $0, (%[noncanonical]); 1:"
		     : : [noncanonical]"a"(NONCANONICAL));
	report(exception_vector() == GP_VECTOR && exception_error_code() == 0,
	       "non-canonical memory access, should %s(0), got %s(%u)",
	       exception_mnemonic(GP_VECTOR),
	       exception_mnemonic(exception_vector()), exception_error_code());

	/* RSP based, should #SS(0) */
	asm volatile(ASM_TRY("1f") "orq $0, (%%rsp,%[noncanonical],1); 1:"
		     : : [noncanonical]"r"(NONCANONICAL));
	report(exception_vector() == SS_VECTOR && exception_error_code() == 0,
	       "non-canonical rsp-based access, should %s(0), got %s(%u)",
	       exception_mnemonic(SS_VECTOR),
	       exception_mnemonic(exception_vector()), exception_error_code());

	/* RBP based, should #SS(0) */
	asm volatile(ASM_TRY("1f") "orq $0, (%%rbp,%[noncanonical],1); 1:"
		     : : [noncanonical]"r"(NONCANONICAL));
	report(exception_vector() == SS_VECTOR && exception_error_code() == 0,
	       "non-canonical rbp-based access, should %s(0), got %s(%u)",
	       exception_mnemonic(SS_VECTOR),
	       exception_mnemonic(exception_vector()), exception_error_code());
}
362 
/* Force-emulate a 64-bit mov-immediate (movabs) and check the result. */
static void test_movabs(uint64_t *mem)
{
	/* mov $0x9090909090909090, %rcx */
	unsigned long rcx;
	asm(KVM_FEP "mov $0x9090909090909090, %0" : "=c" (rcx) : "0" (0));
	report(rcx == 0x9090909090909090, "64-bit mov imm2");
}
370 
/* Force-emulated load of a DPL=0 selector into FS; #GPs at CPL=3. */
static void load_dpl0_seg(void)
{
	asm volatile(KVM_FEP "mov %0, %%fs" :: "r" (KERNEL_CS)); /* RPL=0 */
}
375 
test_user_load_dpl0_seg(void)376 static void test_user_load_dpl0_seg(void)
377 {
378 	bool raised_vector;
379 
380 	run_in_user((usermode_func)load_dpl0_seg, GP_VECTOR, 0, 0, 0, 0,
381 		    &raised_vector);
382 
383 	report(raised_vector, "Wanted #GP on CPL=3 DPL=0 segment load");
384 }
385 
/*
 * PUSHW followed by POPW must each move RSP by exactly 2; verify RSP
 * is back where it started after the pair.
 */
static void test_push16(uint64_t *mem)
{
	uint64_t rsp1, rsp2;
	uint16_t r;

	asm volatile (	"movq %%rsp, %[rsp1]\n\t"
			"pushw %[v]\n\t"
			"popw %[r]\n\t"
			"movq %%rsp, %[rsp2]\n\t"
			"movq %[rsp1], %%rsp\n\t" :
			[rsp1]"=r"(rsp1), [rsp2]"=r"(rsp2), [r]"=r"(r)
			: [v]"m"(*mem) : "memory");
	report(rsp1 == rsp2, "push16");
}
400 
/*
 * SS selector loads: a null selector with RPL == CPL is legal; any
 * load with RPL != CPL (null or not) must #GP, with the error code
 * being 0 for null and the selector index for non-null.
 */
static void test_sreg(volatile uint16_t *mem)
{
	u16 ss = read_ss();	/* preserved and restored at the end */

	// check for null segment load
	*mem = 0;
	asm volatile("mov %0, %%ss" : : "m"(*mem));
	report(read_ss() == 0, "mov null, %%ss");

	// check for exception when ss.rpl != cpl on null segment load
	*mem = 3;
	asm volatile(ASM_TRY("1f") "mov %0, %%ss; 1:" : : "m"(*mem));
	report(exception_vector() == GP_VECTOR &&
	       exception_error_code() == 0 && read_ss() == 0,
	       "mov null, %%ss (with ss.rpl != cpl)");

	// check for exception when ss.rpl != cpl on non-null segment load
	*mem = KERNEL_DS | 3;
	asm volatile(ASM_TRY("1f") "mov %0, %%ss; 1:" : : "m"(*mem));
	report(exception_vector() == GP_VECTOR &&
	       exception_error_code() == KERNEL_DS && read_ss() == 0,
	       "mov non-null, %%ss (with ss.rpl != cpl)");

	write_ss(ss);
}
426 
/*
 * Executed at CPL=3 by test_iret(): read a static variable through the
 * GS segment.  The pointer is pre-biased by -GS_BASE so the access
 * resolves to &dummy when the GS base (MSR_GS_BASE, set by the caller)
 * is GS_BASE.  Returns the value read (MAGIC_NUM on success).
 */
static uint64_t usr_gs_mov(void)
{
	static uint64_t dummy = MAGIC_NUM;
	uint64_t dummy_ptr = (uint64_t)&dummy;
	uint64_t ret;

	dummy_ptr -= GS_BASE;
	asm volatile("mov %%gs:(%1), %0" : "=r"(ret) : "r"(dummy_ptr));

	return ret;
}
438 
/*
 * Check that entering user mode via iret with a GS that fails the
 * segment checks still lets the GS-relative access in usr_gs_mov()
 * succeed (GS base comes from MSR_GS_BASE in 64-bit mode).
 */
static void test_iret(void)
{
	uint64_t val;
	bool raised_vector;

	/* Update GS base to 4MiB */
	wrmsr(MSR_GS_BASE, GS_BASE);

	/*
	 * Per the SDM, jumping to user mode via `iret`, which is returning to
	 * outer privilege level, for segment registers (ES, FS, GS, and DS)
	 * if the check fails, the segment selector becomes null.
	 *
	 * In our test case, GS becomes null.
	 */
	val = run_in_user((usermode_func)usr_gs_mov, GP_VECTOR,
			0, 0, 0, 0, &raised_vector);

	report(val == MAGIC_NUM, "Test ret/iret with a nullified segment");
}
459 
/*
 * Driver for the 64-bit emulator tests.  @mem is a scratch page shared
 * by the sub-tests; a second page is allocated and remapped via vmap()
 * to serve as writable scratch for the runtime-patched instructions in
 * test_rip_relative().
 */
static void test_emulator_64(void *mem)
{
	void *scratch_page = alloc_page();
	void *insn_window = vmap(virt_to_phys(scratch_page), 4096);

	test_push(mem);
	test_pop(mem);
	test_xchg(mem);
	test_xadd(mem);
	test_cr8();
	test_ljmp(mem);
	test_muldiv(mem);
	test_mmx(mem);
	test_rip_relative(mem, insn_window);
	test_iret();
	test_sreg(mem);
	test_cmov(mem);

	/* These rely on KVM's forced-emulation prefix. */
	if (is_fep_available()) {
		test_mmx_movq_mf(mem);
		test_movabs(mem);
		test_user_load_dpl0_seg();
	}

	test_push16(mem);
	test_reg_noncanonical();
	test_jmp_noncanonical(mem);
}
492