xref: /kvm-unit-tests/x86/emulator.c (revision bbdb7433173e8e48a00330fe10ddc9784a153e74)
1f1dcfd54SSean Christopherson #include <asm/debugreg.h>
2f1dcfd54SSean Christopherson 
37d36db35SAvi Kivity #include "ioram.h"
47d36db35SAvi Kivity #include "vm.h"
57d36db35SAvi Kivity #include "libcflat.h"
6e7c37968SGleb Natapov #include "desc.h"
7d7143f32SAvi Kivity #include "types.h"
8b39a3e14SNadav Amit #include "processor.h"
9efd8e5aaSPaolo Bonzini #include "vmalloc.h"
105aca024eSPaolo Bonzini #include "alloc_page.h"
113ee1b91bSBin Meng #include "usermode.h"
127d36db35SAvi Kivity 
137d36db35SAvi Kivity #define TESTDEV_IO_PORT 0xe0
147d36db35SAvi Kivity 
15d7143f32SAvi Kivity static int exceptions;
16d7143f32SAvi Kivity 
17*bbdb7433SSean Christopherson #ifdef __x86_64__
18*bbdb7433SSean Christopherson #include "emulator64.c"
19*bbdb7433SSean Christopherson #endif
20*bbdb7433SSean Christopherson 
217d36db35SAvi Kivity static char st1[] = "abcdefghijklmnop";
227d36db35SAvi Kivity 
237db17e21SThomas Huth static void test_stringio(void)
247d36db35SAvi Kivity {
257d36db35SAvi Kivity 	unsigned char r = 0;
267d36db35SAvi Kivity 	asm volatile("cld \n\t"
277d36db35SAvi Kivity 		     "movw %0, %%dx \n\t"
287d36db35SAvi Kivity 		     "rep outsb \n\t"
297d36db35SAvi Kivity 		     : : "i"((short)TESTDEV_IO_PORT),
307d36db35SAvi Kivity 		       "S"(st1), "c"(sizeof(st1) - 1));
317d36db35SAvi Kivity 	asm volatile("inb %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT));
32a299895bSThomas Huth 	report(r == st1[sizeof(st1) - 2], "outsb up"); /* last char */
337d36db35SAvi Kivity 
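	/*
	 * Repeat the transfer with DF set: SI starts at the last character
	 * and walks backwards, so the final byte written to the port is
	 * st1[0].
	 */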
347d36db35SAvi Kivity 	asm volatile("std \n\t"
357d36db35SAvi Kivity 		     "movw %0, %%dx \n\t"
367d36db35SAvi Kivity 		     "rep outsb \n\t"
377d36db35SAvi Kivity 		     : : "i"((short)TESTDEV_IO_PORT),
387d36db35SAvi Kivity 		       "S"(st1 + sizeof(st1) - 2), "c"(sizeof(st1) - 1));
397d36db35SAvi Kivity 	asm volatile("cld \n\t" : : );
407d36db35SAvi Kivity 	asm volatile("in %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT));
41a299895bSThomas Huth 	report(r == st1[0], "outsb down");
427d36db35SAvi Kivity }
437d36db35SAvi Kivity 
44db4898e8SThomas Huth static void test_cmps_one(unsigned char *m1, unsigned char *m3)
457d36db35SAvi Kivity {
467d36db35SAvi Kivity 	void *rsi, *rdi;
477d36db35SAvi Kivity 	long rcx, tmp;
487d36db35SAvi Kivity 
497d36db35SAvi Kivity 	rsi = m1; rdi = m3; rcx = 30;
507d36db35SAvi Kivity 	asm volatile("xor %[tmp], %[tmp] \n\t"
512d331a4dSRoman Bolshakov 		     "repe cmpsb"
527d36db35SAvi Kivity 		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
537d36db35SAvi Kivity 		     : : "cc");
54a299895bSThomas Huth 	report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe/cmpsb (1)");
557d36db35SAvi Kivity 
5651ba4180SAvi Kivity 	rsi = m1; rdi = m3; rcx = 30;
5751ba4180SAvi Kivity 	asm volatile("or $1, %[tmp]\n\t" // clear ZF
582d331a4dSRoman Bolshakov 		     "repe cmpsb"
5951ba4180SAvi Kivity 		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
6051ba4180SAvi Kivity 		     : : "cc");
61a299895bSThomas Huth 	report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30,
622d331a4dSRoman Bolshakov 	       "repe cmpsb (1.zf)");
6351ba4180SAvi Kivity 
647d36db35SAvi Kivity 	rsi = m1; rdi = m3; rcx = 15;
657d36db35SAvi Kivity 	asm volatile("xor %[tmp], %[tmp] \n\t"
662d331a4dSRoman Bolshakov 		     "repe cmpsw"
677d36db35SAvi Kivity 		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
687d36db35SAvi Kivity 		     : : "cc");
692d331a4dSRoman Bolshakov 	report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe cmpsw (1)");
707d36db35SAvi Kivity 
717d36db35SAvi Kivity 	rsi = m1; rdi = m3; rcx = 7;
727d36db35SAvi Kivity 	asm volatile("xor %[tmp], %[tmp] \n\t"
732d331a4dSRoman Bolshakov 		     "repe cmpsl"
747d36db35SAvi Kivity 		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
757d36db35SAvi Kivity 		     : : "cc");
762d331a4dSRoman Bolshakov 	report(rcx == 0 && rsi == m1 + 28 && rdi == m3 + 28, "repe cmpsl (1)");
777d36db35SAvi Kivity 
78*bbdb7433SSean Christopherson #ifdef __x86_64__
797d36db35SAvi Kivity 	rsi = m1; rdi = m3; rcx = 4;
807d36db35SAvi Kivity 	asm volatile("xor %[tmp], %[tmp] \n\t"
812d331a4dSRoman Bolshakov 		     "repe cmpsq"
827d36db35SAvi Kivity 		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
837d36db35SAvi Kivity 		     : : "cc");
842d331a4dSRoman Bolshakov 	report(rcx == 0 && rsi == m1 + 32 && rdi == m3 + 32, "repe cmpsq (1)");
85*bbdb7433SSean Christopherson #endif
867d36db35SAvi Kivity 
877d36db35SAvi Kivity 	rsi = m1; rdi = m3; rcx = 130;
887d36db35SAvi Kivity 	asm volatile("xor %[tmp], %[tmp] \n\t"
892d331a4dSRoman Bolshakov 		     "repe cmpsb"
907d36db35SAvi Kivity 		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
917d36db35SAvi Kivity 		     : : "cc");
92a299895bSThomas Huth 	report(rcx == 29 && rsi == m1 + 101 && rdi == m3 + 101,
932d331a4dSRoman Bolshakov 	       "repe cmpsb (2)");
947d36db35SAvi Kivity 
957d36db35SAvi Kivity 	rsi = m1; rdi = m3; rcx = 65;
967d36db35SAvi Kivity 	asm volatile("xor %[tmp], %[tmp] \n\t"
972d331a4dSRoman Bolshakov 		     "repe cmpsw"
987d36db35SAvi Kivity 		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
997d36db35SAvi Kivity 		     : : "cc");
100a299895bSThomas Huth 	report(rcx == 14 && rsi == m1 + 102 && rdi == m3 + 102,
1012d331a4dSRoman Bolshakov 	       "repe cmpsw (2)");
1027d36db35SAvi Kivity 
1037d36db35SAvi Kivity 	rsi = m1; rdi = m3; rcx = 32;
1047d36db35SAvi Kivity 	asm volatile("xor %[tmp], %[tmp] \n\t"
1052d331a4dSRoman Bolshakov 		     "repe cmpsl"
1067d36db35SAvi Kivity 		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
1077d36db35SAvi Kivity 		     : : "cc");
108a299895bSThomas Huth 	report(rcx == 6 && rsi == m1 + 104 && rdi == m3 + 104,
1092d331a4dSRoman Bolshakov 	       "repe cmpsl (2)");
1107d36db35SAvi Kivity 
111*bbdb7433SSean Christopherson #ifdef __x86_64__
1127d36db35SAvi Kivity 	rsi = m1; rdi = m3; rcx = 16;
1137d36db35SAvi Kivity 	asm volatile("xor %[tmp], %[tmp] \n\t"
1142d331a4dSRoman Bolshakov 		     "repe cmpsq"
1157d36db35SAvi Kivity 		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
1167d36db35SAvi Kivity 		     : : "cc");
117a299895bSThomas Huth 	report(rcx == 3 && rsi == m1 + 104 && rdi == m3 + 104,
1182d331a4dSRoman Bolshakov 	       "repe cmpsq (2)");
119*bbdb7433SSean Christopherson #endif
1207d36db35SAvi Kivity }
1217d36db35SAvi Kivity 
122db4898e8SThomas Huth static void test_cmps(void *mem)
1237d36db35SAvi Kivity {
1247d36db35SAvi Kivity 	unsigned char *m1 = mem, *m2 = mem + 1024;
1257d36db35SAvi Kivity 	unsigned char m3[1024];
1267d36db35SAvi Kivity 
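	/*
	 * m1 matches m2/m3 for the first 100 bytes and then runs one larger,
	 * so the "(1)" cases in test_cmps_one compare equal data while the
	 * "(2)" cases stop at the first mismatch at offset 100.
	 */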
1277d36db35SAvi Kivity 	for (int i = 0; i < 100; ++i)
1287d36db35SAvi Kivity 		m1[i] = m2[i] = m3[i] = i;
1297d36db35SAvi Kivity 	for (int i = 100; i < 200; ++i)
1307d36db35SAvi Kivity 		m1[i] = (m3[i] = m2[i] = i) + 1;
1317d36db35SAvi Kivity 	test_cmps_one(m1, m3);
1327d36db35SAvi Kivity 	test_cmps_one(m1, m2);
1337d36db35SAvi Kivity }
1347d36db35SAvi Kivity 
135db4898e8SThomas Huth static void test_scas(void *mem)
13680a4ea7bSAvi Kivity {
13780a4ea7bSAvi Kivity     bool z;
13880a4ea7bSAvi Kivity     void *di;
13980a4ea7bSAvi Kivity 
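    /*
     * Every byte of the pattern is distinct, so each operand size below has
     * one AL/AX/EAX/RAX value that matches memory and one that does not.
     */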
140*bbdb7433SSean Christopherson     *(uint64_t *)mem = 0x77665544332211;
14180a4ea7bSAvi Kivity 
14280a4ea7bSAvi Kivity     di = mem;
14380a4ea7bSAvi Kivity     asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff11));
144a299895bSThomas Huth     report(di == mem + 1 && z, "scasb match");
14580a4ea7bSAvi Kivity 
14680a4ea7bSAvi Kivity     di = mem;
14780a4ea7bSAvi Kivity     asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff54));
148a299895bSThomas Huth     report(di == mem + 1 && !z, "scasb mismatch");
14980a4ea7bSAvi Kivity 
15080a4ea7bSAvi Kivity     di = mem;
15180a4ea7bSAvi Kivity     asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff2211));
152a299895bSThomas Huth     report(di == mem + 2 && z, "scasw match");
15380a4ea7bSAvi Kivity 
15480a4ea7bSAvi Kivity     di = mem;
15580a4ea7bSAvi Kivity     asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xffdd11));
156a299895bSThomas Huth     report(di == mem + 2 && !z, "scasw mismatch");
15780a4ea7bSAvi Kivity 
15880a4ea7bSAvi Kivity     di = mem;
159*bbdb7433SSean Christopherson     asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"((ulong)0xff44332211ul));
160a299895bSThomas Huth     report(di == mem + 4 && z, "scasd match");
16180a4ea7bSAvi Kivity 
16280a4ea7bSAvi Kivity     di = mem;
16380a4ea7bSAvi Kivity     asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"(0x45332211));
164a299895bSThomas Huth     report(di == mem + 4 && !z, "scasd mismatch");
16580a4ea7bSAvi Kivity 
166*bbdb7433SSean Christopherson #ifdef __x86_64__
16780a4ea7bSAvi Kivity     di = mem;
16880a4ea7bSAvi Kivity     asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(0x77665544332211ul));
169a299895bSThomas Huth     report(di == mem + 8 && z, "scasq match");
17080a4ea7bSAvi Kivity 
17180a4ea7bSAvi Kivity     di = mem;
17280a4ea7bSAvi Kivity     asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(3));
173a299895bSThomas Huth     report(di == mem + 8 && !z, "scasq mismatch");
174*bbdb7433SSean Christopherson #endif
1757d36db35SAvi Kivity }
1767d36db35SAvi Kivity 
177db4898e8SThomas Huth static void test_incdecnotneg(void *mem)
1787d36db35SAvi Kivity {
1797d36db35SAvi Kivity     unsigned long *m = mem, v = 1234;
1807d36db35SAvi Kivity     unsigned char *mb = mem, vb = 66;
1817d36db35SAvi Kivity 
1827d36db35SAvi Kivity     *m = 0;
1837d36db35SAvi Kivity 
1847d36db35SAvi Kivity     asm volatile ("incl %0":"+m"(*m));
185a299895bSThomas Huth     report(*m == 1, "incl");
1867d36db35SAvi Kivity     asm volatile ("decl %0":"+m"(*m));
187a299895bSThomas Huth     report(*m == 0, "decl");
1887d36db35SAvi Kivity     asm volatile ("incb %0":"+m"(*m));
189a299895bSThomas Huth     report(*m == 1, "incb");
1907d36db35SAvi Kivity     asm volatile ("decb %0":"+m"(*m));
191a299895bSThomas Huth     report(*m == 0, "decb");
1927d36db35SAvi Kivity 
1937d36db35SAvi Kivity     asm volatile ("lock incl %0":"+m"(*m));
194a299895bSThomas Huth     report(*m == 1, "lock incl");
1957d36db35SAvi Kivity     asm volatile ("lock decl %0":"+m"(*m));
196a299895bSThomas Huth     report(*m == 0, "lock decl");
1977d36db35SAvi Kivity     asm volatile ("lock incb %0":"+m"(*m));
198a299895bSThomas Huth     report(*m == 1, "lock incb");
1997d36db35SAvi Kivity     asm volatile ("lock decb %0":"+m"(*m));
200a299895bSThomas Huth     report(*m == 0, "lock decb");
2017d36db35SAvi Kivity 
2027d36db35SAvi Kivity     *m = v;
2037d36db35SAvi Kivity 
204*bbdb7433SSean Christopherson #ifdef __x86_64__
2057d36db35SAvi Kivity     asm ("lock negq %0" : "+m"(*m)); v = -v;
206a299895bSThomas Huth     report(*m == v, "lock negq");
2077d36db35SAvi Kivity     asm ("lock notq %0" : "+m"(*m)); v = ~v;
208a299895bSThomas Huth     report(*m == v, "lock notq");
209*bbdb7433SSean Christopherson #endif
2107d36db35SAvi Kivity 
2117d36db35SAvi Kivity     *mb = vb;
2127d36db35SAvi Kivity 
2137d36db35SAvi Kivity     asm ("lock negb %0" : "+m"(*mb)); vb = -vb;
214a299895bSThomas Huth     report(*mb == vb, "lock negb");
2157d36db35SAvi Kivity     asm ("lock notb %0" : "+m"(*mb)); vb = ~vb;
216a299895bSThomas Huth     report(*mb == vb, "lock notb");
2177d36db35SAvi Kivity }
2187d36db35SAvi Kivity 
219*bbdb7433SSean Christopherson static void test_smsw(unsigned long *h_mem)
2207d36db35SAvi Kivity {
2217d36db35SAvi Kivity 	char mem[16];
2227d36db35SAvi Kivity 	unsigned short msw, msw_orig, *pmsw;
2237d36db35SAvi Kivity 	int i, zero;
2247d36db35SAvi Kivity 
2257d36db35SAvi Kivity 	msw_orig = read_cr0();
2267d36db35SAvi Kivity 
2277d36db35SAvi Kivity 	asm("smsw %0" : "=r"(msw));
228a299895bSThomas Huth 	report(msw == msw_orig, "smsw (1)");
2297d36db35SAvi Kivity 
2307d36db35SAvi Kivity 	memset(mem, 0, 16);
2317d36db35SAvi Kivity 	pmsw = (void *)mem;
2327d36db35SAvi Kivity 	asm("smsw %0" : "=m"(pmsw[4]));
2337d36db35SAvi Kivity 	zero = 1;
2347d36db35SAvi Kivity 	for (i = 0; i < 8; ++i)
2357d36db35SAvi Kivity 		if (i != 4 && pmsw[i])
2367d36db35SAvi Kivity 			zero = 0;
237a299895bSThomas Huth 	report(msw == pmsw[4] && zero, "smsw (2)");
2384003963dSNadav Amit 
2394003963dSNadav Amit 	/* Trigger exit on smsw */
240*bbdb7433SSean Christopherson 	*h_mem = -1ul;
24111147080SChris J Arges 	asm volatile("smsw %0" : "+m"(*h_mem));
242a299895bSThomas Huth 	report(msw == (unsigned short)*h_mem &&
243*bbdb7433SSean Christopherson 	       (*h_mem & ~0xfffful) == (-1ul & ~0xfffful), "smsw (3)");
2447d36db35SAvi Kivity }
2457d36db35SAvi Kivity 
246db4898e8SThomas Huth static void test_lmsw(void)
2477d36db35SAvi Kivity {
2487d36db35SAvi Kivity 	char mem[16];
2497d36db35SAvi Kivity 	unsigned short msw, *pmsw;
2507d36db35SAvi Kivity 	unsigned long cr0;
2517d36db35SAvi Kivity 
2527d36db35SAvi Kivity 	cr0 = read_cr0();
2537d36db35SAvi Kivity 
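	/* Toggle CR0.TS (bit 3) via lmsw from a register operand. */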
2547d36db35SAvi Kivity 	msw = cr0 ^ 8;
2557d36db35SAvi Kivity 	asm("lmsw %0" : : "r"(msw));
2567d36db35SAvi Kivity 	printf("before %lx after %lx\n", cr0, read_cr0());
257a299895bSThomas Huth 	report((cr0 ^ read_cr0()) == 8, "lmsw (1)");
2587d36db35SAvi Kivity 
2597d36db35SAvi Kivity 	pmsw = (void *)mem;
2607d36db35SAvi Kivity 	*pmsw = cr0;
2617d36db35SAvi Kivity 	asm("lmsw %0" : : "m"(*pmsw));
2627d36db35SAvi Kivity 	printf("before %lx after %lx\n", cr0, read_cr0());
263a299895bSThomas Huth 	report(cr0 == read_cr0(), "lmsw (2)");
2647d36db35SAvi Kivity 
2657d36db35SAvi Kivity 	/* lmsw can't clear cr0.pe */
2667d36db35SAvi Kivity 	msw = (cr0 & ~1ul) ^ 4;  /* change EM to force trap */
2677d36db35SAvi Kivity 	asm("lmsw %0" : : "r"(msw));
268a299895bSThomas Huth 	report((cr0 ^ read_cr0()) == 4 && (cr0 & 1), "lmsw (3)");
2697d36db35SAvi Kivity 
2707d36db35SAvi Kivity 	/* back to normal */
2717d36db35SAvi Kivity 	msw = cr0;
2727d36db35SAvi Kivity 	asm("lmsw %0" : : "r"(msw));
2737d36db35SAvi Kivity }
2747d36db35SAvi Kivity 
275db4898e8SThomas Huth static void test_btc(void *mem)
276d4655eafSWei Yongjun {
277d4655eafSWei Yongjun 	unsigned int *a = mem;
278d4655eafSWei Yongjun 
2797e083f20SNadav Amit 	memset(mem, 0, 4 * sizeof(unsigned int));
280d4655eafSWei Yongjun 
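	/*
	 * An immediate bit offset is truncated to the operand size ($32 hits
	 * bit 0 of a[0]); a register offset is a full signed displacement, so
	 * 66 lands in a[2] and -1 lands in the dword below the r/m operand.
	 */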
281d4655eafSWei Yongjun 	asm ("btcl $32, %0" :: "m"(a[0]) : "memory");
282d4655eafSWei Yongjun 	asm ("btcl $1, %0" :: "m"(a[1]) : "memory");
283d4655eafSWei Yongjun 	asm ("btcl %1, %0" :: "m"(a[0]), "r"(66) : "memory");
284a299895bSThomas Huth 	report(a[0] == 1 && a[1] == 2 && a[2] == 4, "btcl imm8, r/m");
285d4655eafSWei Yongjun 
286d4655eafSWei Yongjun 	asm ("btcl %1, %0" :: "m"(a[3]), "r"(-1) : "memory");
287a299895bSThomas Huth 	report(a[0] == 1 && a[1] == 2 && a[2] == 0x80000004, "btcl reg, r/m");
2887e083f20SNadav Amit 
289*bbdb7433SSean Christopherson #ifdef __x86_64__
2907e083f20SNadav Amit 	asm ("btcq %1, %0" : : "m"(a[2]), "r"(-1l) : "memory");
291a299895bSThomas Huth 	report(a[0] == 1 && a[1] == 0x80000002 && a[2] == 0x80000004 && a[3] == 0,
292a299895bSThomas Huth 	       "btcq reg, r/m");
293*bbdb7433SSean Christopherson #endif
294d4655eafSWei Yongjun }
295d4655eafSWei Yongjun 
296db4898e8SThomas Huth static void test_bsfbsr(void *mem)
2972e16c7f6SWei Yongjun {
298554de466SAvi Kivity 	unsigned eax, *meml = mem;
299554de466SAvi Kivity 	unsigned short ax, *memw = mem;
300*bbdb7433SSean Christopherson #ifdef __x86_64__
301*bbdb7433SSean Christopherson 	unsigned long rax, *memq = mem;
302554de466SAvi Kivity 	unsigned char z;
303*bbdb7433SSean Christopherson #endif
3042e16c7f6SWei Yongjun 
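	/*
	 * bsf finds the lowest set bit and bsr the highest; the 64-bit cases
	 * additionally check that ZF is set when the source operand is zero.
	 */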
305554de466SAvi Kivity 	*memw = 0xc000;
306554de466SAvi Kivity 	asm("bsfw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw));
307a299895bSThomas Huth 	report(ax == 14, "bsfw r/m, reg");
3082e16c7f6SWei Yongjun 
309554de466SAvi Kivity 	*meml = 0xc0000000;
310554de466SAvi Kivity 	asm("bsfl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml));
311a299895bSThomas Huth 	report(eax == 30, "bsfl r/m, reg");
3122e16c7f6SWei Yongjun 
313*bbdb7433SSean Christopherson #ifdef __x86_64__
314554de466SAvi Kivity 	*memq = 0xc00000000000;
315554de466SAvi Kivity 	asm("bsfq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq));
316a299895bSThomas Huth 	report(rax == 46, "bsfq r/m, reg");
3172e16c7f6SWei Yongjun 
318554de466SAvi Kivity 	*memq = 0;
319554de466SAvi Kivity 	asm("bsfq %[mem], %[a]; setz %[z]"
320554de466SAvi Kivity 	    : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq));
321a299895bSThomas Huth 	report(z == 1, "bsfq r/m, reg (zf)");
322*bbdb7433SSean Christopherson #endif
3232e16c7f6SWei Yongjun 
324554de466SAvi Kivity 	*memw = 0xc000;
325554de466SAvi Kivity 	asm("bsrw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw));
326a299895bSThomas Huth 	report(ax == 15, "bsrw r/m, reg");
3272e16c7f6SWei Yongjun 
328554de466SAvi Kivity 	*meml = 0xc0000000;
329554de466SAvi Kivity 	asm("bsrl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml));
330a299895bSThomas Huth 	report(eax == 31, "bsrl r/m, reg");
3312e16c7f6SWei Yongjun 
332*bbdb7433SSean Christopherson #ifdef __x86_64__
333554de466SAvi Kivity 	*memq = 0xc00000000000;
334554de466SAvi Kivity 	asm("bsrq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq));
335a299895bSThomas Huth 	report(rax == 47, "bsrq r/m, reg");
3362e16c7f6SWei Yongjun 
337554de466SAvi Kivity 	*memq = 0;
338554de466SAvi Kivity 	asm("bsrq %[mem], %[a]; setz %[z]"
339554de466SAvi Kivity 	    : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq));
340a299895bSThomas Huth 	report(z == 1, "bsrq r/m, reg (zf)");
341*bbdb7433SSean Christopherson #endif
3422e16c7f6SWei Yongjun }
3432e16c7f6SWei Yongjun 
344*bbdb7433SSean Christopherson static void test_imul(uint64_t *mem)
34551d65a3cSAvi Kivity {
34651d65a3cSAvi Kivity 	ulong a;
34751d65a3cSAvi Kivity 
348*bbdb7433SSean Christopherson 	*mem = 51; a = 0x1234567812345678ULL & -1ul;
34951d65a3cSAvi Kivity 	asm ("imulw %1, %%ax" : "+a"(a) : "m"(*mem));
350*bbdb7433SSean Christopherson 	report(a == (0x12345678123439e8ULL & -1ul), "imul ax, mem");
35151d65a3cSAvi Kivity 
352*bbdb7433SSean Christopherson 	*mem = 51; a = 0x1234567812345678ULL & -1ul;
35351d65a3cSAvi Kivity 	asm ("imull %1, %%eax" : "+a"(a) : "m"(*mem));
354a299895bSThomas Huth 	report(a == 0xa06d39e8, "imul eax, mem");
35551d65a3cSAvi Kivity 
356*bbdb7433SSean Christopherson 	*mem  = 0x1234567812345678ULL; a = 0x8765432187654321ULL & -1ul;
357*bbdb7433SSean Christopherson 	asm ("imulw $51, %1, %%ax" : "+a"(a) : "m"(*mem));
358*bbdb7433SSean Christopherson 	report(a == (0x87654321876539e8ULL & -1ul), "imul ax, mem, imm8");
359*bbdb7433SSean Christopherson 
360*bbdb7433SSean Christopherson 	*mem = 0x1234567812345678ULL;
361*bbdb7433SSean Christopherson 	asm ("imull $51, %1, %%eax" : "+a"(a) : "m"(*mem));
362*bbdb7433SSean Christopherson 	report(a == 0xa06d39e8, "imul eax, mem, imm8");
363*bbdb7433SSean Christopherson 
364*bbdb7433SSean Christopherson 	*mem  = 0x1234567812345678ULL; a = 0x8765432187654321ULL & -1ul;
365*bbdb7433SSean Christopherson 	asm ("imulw $311, %1, %%ax" : "+a"(a) : "m"(*mem));
366*bbdb7433SSean Christopherson 	report(a == (0x8765432187650bc8ULL & -1ul), "imul ax, mem, imm");
367*bbdb7433SSean Christopherson 
368*bbdb7433SSean Christopherson 	*mem = 0x1234567812345678ULL;
369*bbdb7433SSean Christopherson 	asm ("imull $311, %1, %%eax" : "+a"(a) : "m"(*mem));
370*bbdb7433SSean Christopherson 	report(a == 0x1d950bc8, "imul eax, mem, imm");
371*bbdb7433SSean Christopherson 
372*bbdb7433SSean Christopherson #ifdef __x86_64__
37351d65a3cSAvi Kivity 	*mem = 51; a = 0x1234567812345678UL;
37451d65a3cSAvi Kivity 	asm ("imulq %1, %%rax" : "+a"(a) : "m"(*mem));
375a299895bSThomas Huth 	report(a == 0xA06D39EBA06D39E8UL, "imul rax, mem");
37651d65a3cSAvi Kivity 
37751d65a3cSAvi Kivity 	*mem = 0x1234567812345678UL;
37851d65a3cSAvi Kivity 	asm ("imulq $51, %1, %%rax" : "+a"(a) : "m"(*mem));
379a299895bSThomas Huth 	report(a == 0xA06D39EBA06D39E8UL, "imul rax, mem, imm8");
38051d65a3cSAvi Kivity 
38151d65a3cSAvi Kivity 	*mem = 0x1234567812345678UL;
38251d65a3cSAvi Kivity 	asm ("imulq $311, %1, %%rax" : "+a"(a) : "m"(*mem));
383a299895bSThomas Huth 	report(a == 0x1D950BDE1D950BC8L, "imul rax, mem, imm");
384*bbdb7433SSean Christopherson #endif
38551d65a3cSAvi Kivity }
386d7f3ee3cSAvi Kivity typedef unsigned __attribute__((vector_size(16))) sse128;
387d7f3ee3cSAvi Kivity 
38893a3ae40SJacob Xu static bool sseeq(uint32_t *v1, uint32_t *v2)
389d7f3ee3cSAvi Kivity {
390d7f3ee3cSAvi Kivity     bool ok = true;
391d7f3ee3cSAvi Kivity     int i;
392d7f3ee3cSAvi Kivity 
393d7f3ee3cSAvi Kivity     for (i = 0; i < 4; ++i) {
39493a3ae40SJacob Xu 	ok &= v1[i] == v2[i];
395d7f3ee3cSAvi Kivity     }
396d7f3ee3cSAvi Kivity 
397d7f3ee3cSAvi Kivity     return ok;
398d7f3ee3cSAvi Kivity }
399d7f3ee3cSAvi Kivity 
40093a3ae40SJacob Xu static __attribute__((target("sse2"))) void test_sse(uint32_t *mem)
401d7f3ee3cSAvi Kivity {
40293a3ae40SJacob Xu 	sse128 vv;
40393a3ae40SJacob Xu 	uint32_t *v = (uint32_t *)&vv;
404d7f3ee3cSAvi Kivity 
405d7f3ee3cSAvi Kivity 	write_cr0(read_cr0() & ~6); /* EM, TS */
406d7f3ee3cSAvi Kivity 	write_cr4(read_cr4() | 0x200); /* OSFXSR */
40793a3ae40SJacob Xu 	memset(&vv, 0, sizeof(vv));
408290ed5d5SIgor Mammedov 
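	/*
	 * Exercise each SSE move in both directions, xmm register to memory
	 * and memory to xmm register, and verify the 128-bit data survives
	 * the emulated access intact.
	 */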
4098726f977SJacob Xu #define TEST_RW_SSE(insn) do { \
41093a3ae40SJacob Xu 		v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; \
41193a3ae40SJacob Xu 		asm(insn " %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); \
41293a3ae40SJacob Xu 		report(sseeq(v, mem), insn " (read)"); \
41393a3ae40SJacob Xu 		mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8; \
41493a3ae40SJacob Xu 		asm(insn " %1, %0" : "=x"(vv) : "m"(*mem) : "memory"); \
41593a3ae40SJacob Xu 		report(sseeq(v, mem), insn " (write)"); \
4168726f977SJacob Xu } while (0)
417f068a46aSIgor Mammedov 
4188726f977SJacob Xu 	TEST_RW_SSE("movdqu");
4198726f977SJacob Xu 	TEST_RW_SSE("movaps");
4208726f977SJacob Xu 	TEST_RW_SSE("movapd");
4218726f977SJacob Xu 	TEST_RW_SSE("movups");
4228726f977SJacob Xu 	TEST_RW_SSE("movupd");
4238726f977SJacob Xu #undef TEST_RW_SSE
424d7f3ee3cSAvi Kivity }
425d7f3ee3cSAvi Kivity 
426e5e76263SJacob Xu static void unaligned_movaps_handler(struct ex_regs *regs)
427e5e76263SJacob Xu {
428e5e76263SJacob Xu 	extern char unaligned_movaps_cont;
429e5e76263SJacob Xu 
430e5e76263SJacob Xu 	++exceptions;
431e5e76263SJacob Xu 	regs->rip = (ulong)&unaligned_movaps_cont;
432e5e76263SJacob Xu }
433e5e76263SJacob Xu 
434e5e76263SJacob Xu static void cross_movups_handler(struct ex_regs *regs)
435e5e76263SJacob Xu {
436e5e76263SJacob Xu 	extern char cross_movups_cont;
437e5e76263SJacob Xu 
438e5e76263SJacob Xu 	++exceptions;
439e5e76263SJacob Xu 	regs->rip = (ulong)&cross_movups_cont;
440e5e76263SJacob Xu }
441e5e76263SJacob Xu 
442e5e76263SJacob Xu static __attribute__((target("sse2"))) void test_sse_exceptions(void *cross_mem)
443e5e76263SJacob Xu {
44493a3ae40SJacob Xu 	sse128 vv;
44593a3ae40SJacob Xu 	uint32_t *v = (uint32_t *)&vv;
44693a3ae40SJacob Xu 	uint32_t *mem;
447e5e76263SJacob Xu 	uint8_t *bytes = cross_mem; // aligned on PAGE_SIZE*2
448e5e76263SJacob Xu 	void *page2 = (void *)(&bytes[4096]);
449e5e76263SJacob Xu 	struct pte_search search;
450e5e76263SJacob Xu 	pteval_t orig_pte;
45115bfae71SMichal Luczaj 	handler old;
452e5e76263SJacob Xu 
453e5e76263SJacob Xu 	// setup memory for unaligned access
45493a3ae40SJacob Xu 	mem = (uint32_t *)(&bytes[8]);
455e5e76263SJacob Xu 
456e5e76263SJacob Xu 	// test unaligned access for movups, movupd and movaps
45793a3ae40SJacob Xu 	v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4;
45893a3ae40SJacob Xu 	mem[0] = 5; mem[1] = 6; mem[2] = 8; mem[3] = 9;
45993a3ae40SJacob Xu 	asm("movups %1, %0" : "=m"(*mem) : "x"(vv) : "memory");
46093a3ae40SJacob Xu 	report(sseeq(v, mem), "movups unaligned");
461e5e76263SJacob Xu 
46293a3ae40SJacob Xu 	v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4;
46393a3ae40SJacob Xu 	mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8;
46493a3ae40SJacob Xu 	asm("movupd %1, %0" : "=m"(*mem) : "x"(vv) : "memory");
46593a3ae40SJacob Xu 	report(sseeq(v, mem), "movupd unaligned");
466e5e76263SJacob Xu 	exceptions = 0;
46715bfae71SMichal Luczaj 	old = handle_exception(GP_VECTOR, unaligned_movaps_handler);
468e5e76263SJacob Xu 	asm("movaps %1, %0\n\t unaligned_movaps_cont:"
46993a3ae40SJacob Xu 			: "=m"(*mem) : "x"(vv));
47015bfae71SMichal Luczaj 	handle_exception(GP_VECTOR, old);
471e5e76263SJacob Xu 	report(exceptions == 1, "unaligned movaps exception");
472e5e76263SJacob Xu 
473e5e76263SJacob Xu 	// setup memory for cross page access
47493a3ae40SJacob Xu 	mem = (uint32_t *)(&bytes[4096-8]);
47593a3ae40SJacob Xu 	v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4;
47693a3ae40SJacob Xu 	mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8;
477e5e76263SJacob Xu 
47893a3ae40SJacob Xu 	asm("movups %1, %0" : "=m"(*mem) : "x"(vv) : "memory");
47993a3ae40SJacob Xu 	report(sseeq(v, mem), "movups unaligned crosspage");
480e5e76263SJacob Xu 
481e5e76263SJacob Xu 	// invalidate second page
482e5e76263SJacob Xu 	search = find_pte_level(current_page_table(), page2, 1);
483e5e76263SJacob Xu 	orig_pte = *search.pte;
484e5e76263SJacob Xu 	install_pte(current_page_table(), 1, page2, 0, NULL);
485e5e76263SJacob Xu 	invlpg(page2);
486e5e76263SJacob Xu 
487e5e76263SJacob Xu 	exceptions = 0;
48815bfae71SMichal Luczaj 	old = handle_exception(PF_VECTOR, cross_movups_handler);
48993a3ae40SJacob Xu 	asm("movups %1, %0\n\t cross_movups_cont:" : "=m"(*mem) : "x"(vv) :
49093a3ae40SJacob Xu 			"memory");
49115bfae71SMichal Luczaj 	handle_exception(PF_VECTOR, old);
492e5e76263SJacob Xu 	report(exceptions == 1, "movups crosspage exception");
493e5e76263SJacob Xu 
494e5e76263SJacob Xu 	// restore invalidated page
495e5e76263SJacob Xu 	install_pte(current_page_table(), 1, page2, orig_pte, NULL);
496e5e76263SJacob Xu }
497e5e76263SJacob Xu 
498b212fcdaSAvi Kivity static void test_shld_shrd(u32 *mem)
499b212fcdaSAvi Kivity {
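    /*
     * shld shifts in the top bits of the source (101b = 5 for 0xaaaaaaaa);
     * shrd shifts in the low bits of the source (101b = 5 for 0x55555555).
     */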
500b212fcdaSAvi Kivity     *mem = 0x12345678;
501b212fcdaSAvi Kivity     asm("shld %2, %1, %0" : "+m"(*mem) : "r"(0xaaaaaaaaU), "c"((u8)3));
502a299895bSThomas Huth     report(*mem == ((0x12345678 << 3) | 5), "shld (cl)");
503b212fcdaSAvi Kivity     *mem = 0x12345678;
504b212fcdaSAvi Kivity     asm("shrd %2, %1, %0" : "+m"(*mem) : "r"(0x55555555U), "c"((u8)3));
505a299895bSThomas Huth     report(*mem == ((0x12345678 >> 3) | (5u << 29)), "shrd (cl)");
506b212fcdaSAvi Kivity }
507b212fcdaSAvi Kivity 
50845fdc228SPaolo Bonzini static void test_smsw_reg(uint64_t *mem)
509313f4efeSNadav Amit {
510313f4efeSNadav Amit 	unsigned long cr0 = read_cr0();
51145fdc228SPaolo Bonzini 	unsigned long rax;
512*bbdb7433SSean Christopherson 	const unsigned long in_rax = 0x1234567890abcdefull & -1ul;
513313f4efeSNadav Amit 
51445fdc228SPaolo Bonzini 	asm(KVM_FEP "smsww %w0\n\t" : "=a" (rax) : "0" (in_rax));
515a299895bSThomas Huth 	report((u16)rax == (u16)cr0 && rax >> 16 == in_rax >> 16,
516a299895bSThomas Huth 	       "16-bit smsw reg");
517313f4efeSNadav Amit 
51845fdc228SPaolo Bonzini 	asm(KVM_FEP "smswl %k0\n\t" : "=a" (rax) : "0" (in_rax));
519a299895bSThomas Huth 	report(rax == (u32)cr0, "32-bit smsw reg");
520313f4efeSNadav Amit 
521*bbdb7433SSean Christopherson #ifdef __x86_64__
5222f394044SBill Wendling 	asm(KVM_FEP "smswq %q0\n\t" : "=a" (rax) : "0" (in_rax));
523a299895bSThomas Huth 	report(rax == cr0, "64-bit smsw reg");
524*bbdb7433SSean Christopherson #endif
525313f4efeSNadav Amit }
526313f4efeSNadav Amit 
52745fdc228SPaolo Bonzini static void test_nop(uint64_t *mem)
528ae399010SNadav Amit {
52945fdc228SPaolo Bonzini 	unsigned long rax;
530*bbdb7433SSean Christopherson 	const unsigned long in_rax = 0x12345678ul;
53145fdc228SPaolo Bonzini 	asm(KVM_FEP "nop\n\t" : "=a" (rax) : "0" (in_rax));
532a299895bSThomas Huth 	report(rax == in_rax, "nop");
533ae399010SNadav Amit }
534ae399010SNadav Amit 
53545fdc228SPaolo Bonzini static void test_mov_dr(uint64_t *mem)
536b39a3e14SNadav Amit {
53745fdc228SPaolo Bonzini 	unsigned long rax;
538f1dcfd54SSean Christopherson 
539*bbdb7433SSean Christopherson 	asm(KVM_FEP "mov %0, %%dr6\n\t"
540*bbdb7433SSean Christopherson 	    KVM_FEP "mov %%dr6, %0\n\t" : "=a" (rax) : "a" (0));
541f1dcfd54SSean Christopherson 
542f1dcfd54SSean Christopherson 	if (this_cpu_has(X86_FEATURE_RTM))
543f1dcfd54SSean Christopherson 		report(rax == (DR6_ACTIVE_LOW & ~DR6_RTM), "mov_dr6");
544f1dcfd54SSean Christopherson 	else
545f1dcfd54SSean Christopherson 		report(rax == DR6_ACTIVE_LOW, "mov_dr6");
546b39a3e14SNadav Amit }
547b39a3e14SNadav Amit 
5480dcb3fbaSMichal Luczaj static void test_illegal_lea(void)
5490dcb3fbaSMichal Luczaj {
5500dcb3fbaSMichal Luczaj 	unsigned int vector;
5510dcb3fbaSMichal Luczaj 
5520dcb3fbaSMichal Luczaj 	asm volatile (ASM_TRY_FEP("1f")
5530dcb3fbaSMichal Luczaj 		      ".byte 0x8d; .byte 0xc0\n\t"
5540dcb3fbaSMichal Luczaj 		      "1:"
5550dcb3fbaSMichal Luczaj 		      : : : "memory", "eax");
5560dcb3fbaSMichal Luczaj 
5570dcb3fbaSMichal Luczaj 	vector = exception_vector();
5580dcb3fbaSMichal Luczaj 	report(vector == UD_VECTOR,
5590dcb3fbaSMichal Luczaj 	       "Wanted #UD on LEA with /reg, got vector = %u", vector);
5600dcb3fbaSMichal Luczaj }
5610dcb3fbaSMichal Luczaj 
562ec278ce3SAvi Kivity static void test_crosspage_mmio(volatile uint8_t *mem)
563ec278ce3SAvi Kivity {
564ec278ce3SAvi Kivity     volatile uint16_t w, *pw;
565ec278ce3SAvi Kivity 
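    /*
     * main() maps the same MMIO page at mem and at mem + 4096, so a 16-bit
     * access at offset 4095 straddles two MMIO pages.
     */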
566ec278ce3SAvi Kivity     pw = (volatile uint16_t *)&mem[4095];
567ec278ce3SAvi Kivity     mem[4095] = 0x99;
568ec278ce3SAvi Kivity     mem[4096] = 0x77;
569ec278ce3SAvi Kivity     asm volatile("mov %1, %0" : "=r"(w) : "m"(*pw) : "memory");
570a299895bSThomas Huth     report(w == 0x7799, "cross-page mmio read");
571ec278ce3SAvi Kivity     asm volatile("mov %1, %0" : "=m"(*pw) : "r"((uint16_t)0x88aa));
572a299895bSThomas Huth     report(mem[4095] == 0xaa && mem[4096] == 0x88, "cross-page mmio write");
573ec278ce3SAvi Kivity }
574ec278ce3SAvi Kivity 
575a19c7db7SXiao Guangrong static void test_string_io_mmio(volatile uint8_t *mem)
576a19c7db7SXiao Guangrong {
577a19c7db7SXiao Guangrong 	/* Cross MMIO pages.*/
578a19c7db7SXiao Guangrong 	volatile uint8_t *mmio = mem + 4032;
579a19c7db7SXiao Guangrong 
580a19c7db7SXiao Guangrong 	asm volatile("outw %%ax, %%dx  \n\t" : : "a"(0x9999), "d"(TESTDEV_IO_PORT));
581a19c7db7SXiao Guangrong 
582a19c7db7SXiao Guangrong 	asm volatile ("cld; rep insb" : : "d" (TESTDEV_IO_PORT), "D" (mmio), "c" (1024));
583a19c7db7SXiao Guangrong 
584a299895bSThomas Huth 	report(mmio[1023] == 0x99, "string_io_mmio");
585a19c7db7SXiao Guangrong }
586a19c7db7SXiao Guangrong 
58756c6afa7SJan Kiszka /* kvm doesn't allow lidt/lgdt from mmio, so the test is disabled */
58856c6afa7SJan Kiszka #if 0
58947c1461aSAvi Kivity static void test_lgdt_lidt(volatile uint8_t *mem)
59047c1461aSAvi Kivity {
59147c1461aSAvi Kivity     struct descriptor_table_ptr orig, fresh = {};
59247c1461aSAvi Kivity 
59347c1461aSAvi Kivity     sgdt(&orig);
59447c1461aSAvi Kivity     *(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) {
59547c1461aSAvi Kivity 	.limit = 0xf234,
59647c1461aSAvi Kivity 	.base = 0x12345678abcd,
59747c1461aSAvi Kivity     };
59847c1461aSAvi Kivity     cli();
59947c1461aSAvi Kivity     asm volatile("lgdt %0" : : "m"(*(struct descriptor_table_ptr *)mem));
60047c1461aSAvi Kivity     sgdt(&fresh);
60147c1461aSAvi Kivity     lgdt(&orig);
60247c1461aSAvi Kivity     sti();
603a299895bSThomas Huth     report(orig.limit == fresh.limit && orig.base == fresh.base,
604a299895bSThomas Huth            "lgdt (long address)");
60547c1461aSAvi Kivity 
60647c1461aSAvi Kivity     sidt(&orig);
60747c1461aSAvi Kivity     *(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) {
60847c1461aSAvi Kivity 	.limit = 0x432f,
60947c1461aSAvi Kivity 	.base = 0xdbca87654321,
61047c1461aSAvi Kivity     };
61147c1461aSAvi Kivity     cli();
61247c1461aSAvi Kivity     asm volatile("lidt %0" : : "m"(*(struct descriptor_table_ptr *)mem));
61347c1461aSAvi Kivity     sidt(&fresh);
61447c1461aSAvi Kivity     lidt(&orig);
61547c1461aSAvi Kivity     sti();
616a299895bSThomas Huth     report(orig.limit == fresh.limit && orig.base == fresh.base,
617a299895bSThomas Huth            "lidt (long address)");
61847c1461aSAvi Kivity }
61956c6afa7SJan Kiszka #endif
62047c1461aSAvi Kivity 
6214425dba6SPeter Feiner /* Broken emulation causes triple fault, which skips the other tests. */
6224425dba6SPeter Feiner #if 0
623cb615a4dSAvi Kivity static void test_lldt(volatile uint16_t *mem)
624cb615a4dSAvi Kivity {
6254425dba6SPeter Feiner     u64 gdt[] = { 0, /* null descriptor */
6264425dba6SPeter Feiner #ifdef __x86_64__
6274425dba6SPeter Feiner 		  0, /* ldt descriptor is 16 bytes in long mode */
6284425dba6SPeter Feiner #endif
6294425dba6SPeter Feiner 		  0x000f82000000ffffull /* ldt descriptor */ };
6304425dba6SPeter Feiner     struct descriptor_table_ptr gdt_ptr = { .limit = sizeof(gdt) - 1,
6314425dba6SPeter Feiner 					    .base = (ulong)&gdt };
632cb615a4dSAvi Kivity     struct descriptor_table_ptr orig_gdt;
633cb615a4dSAvi Kivity 
634cb615a4dSAvi Kivity     cli();
635cb615a4dSAvi Kivity     sgdt(&orig_gdt);
636cb615a4dSAvi Kivity     lgdt(&gdt_ptr);
637cb615a4dSAvi Kivity     *mem = 0x8;
638cb615a4dSAvi Kivity     asm volatile("lldt %0" : : "m"(*mem));
639cb615a4dSAvi Kivity     lgdt(&orig_gdt);
640cb615a4dSAvi Kivity     sti();
641a299895bSThomas Huth     report(sldt() == *mem, "lldt");
642cb615a4dSAvi Kivity }
6434425dba6SPeter Feiner #endif
644cb615a4dSAvi Kivity 
64558a9d81eSAvi Kivity static void test_ltr(volatile uint16_t *mem)
64658a9d81eSAvi Kivity {
64758a9d81eSAvi Kivity     struct descriptor_table_ptr gdt_ptr;
64858a9d81eSAvi Kivity     uint64_t *gdt, *trp;
64958a9d81eSAvi Kivity     uint16_t tr = str();
65058a9d81eSAvi Kivity     uint64_t busy_mask = (uint64_t)1 << 41;
65158a9d81eSAvi Kivity 
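    /*
     * ltr #GPs if the referenced TSS descriptor is already marked busy, so
     * clear the busy bit in the GDT entry before reloading the current
     * selector, then check that ltr sets it again.
     */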
65258a9d81eSAvi Kivity     sgdt(&gdt_ptr);
65358a9d81eSAvi Kivity     gdt = (uint64_t *)gdt_ptr.base;
65458a9d81eSAvi Kivity     trp = &gdt[tr >> 3];
65558a9d81eSAvi Kivity     *trp &= ~busy_mask;
65658a9d81eSAvi Kivity     *mem = tr;
65758a9d81eSAvi Kivity     asm volatile("ltr %0" : : "m"(*mem) : "memory");
658a299895bSThomas Huth     report(str() == tr && (*trp & busy_mask), "ltr");
65958a9d81eSAvi Kivity }
66058a9d81eSAvi Kivity 
661215ad64cSSean Christopherson static void test_mov(void *mem)
662215ad64cSSean Christopherson {
663215ad64cSSean Christopherson 	unsigned long t1, t2;
664215ad64cSSean Christopherson 
665215ad64cSSean Christopherson 	// test mov reg, r/m and mov r/m, reg
666*bbdb7433SSean Christopherson 	t1 = 0x123456789abcdefull & -1ul;
667215ad64cSSean Christopherson 	asm volatile("mov %[t1], (%[mem]) \n\t"
668215ad64cSSean Christopherson 		     "mov (%[mem]), %[t2]"
669215ad64cSSean Christopherson 		     : [t2]"=r"(t2)
670215ad64cSSean Christopherson 		     : [t1]"r"(t1), [mem]"r"(mem)
671215ad64cSSean Christopherson 		     : "memory");
672*bbdb7433SSean Christopherson 	report(t2 == (0x123456789abcdefull & -1ul), "mov reg, r/m (1)");
673215ad64cSSean Christopherson }
674215ad64cSSean Christopherson 
6756cff92ddSAvi Kivity static void test_simplealu(u32 *mem)
6766cff92ddSAvi Kivity {
6776cff92ddSAvi Kivity     *mem = 0x1234;
6786cff92ddSAvi Kivity     asm("or %1, %0" : "+m"(*mem) : "r"(0x8001));
679a299895bSThomas Huth     report(*mem == 0x9235, "or");
6806cff92ddSAvi Kivity     asm("add %1, %0" : "+m"(*mem) : "r"(2));
681a299895bSThomas Huth     report(*mem == 0x9237, "add");
6826cff92ddSAvi Kivity     asm("xor %1, %0" : "+m"(*mem) : "r"(0x1111));
683a299895bSThomas Huth     report(*mem == 0x8326, "xor");
6846cff92ddSAvi Kivity     asm("sub %1, %0" : "+m"(*mem) : "r"(0x26));
685a299895bSThomas Huth     report(*mem == 0x8300, "sub");
6866cff92ddSAvi Kivity     asm("clc; adc %1, %0" : "+m"(*mem) : "r"(0x100));
687a299895bSThomas Huth     report(*mem == 0x8400, "adc(0)");
6886cff92ddSAvi Kivity     asm("stc; adc %1, %0" : "+m"(*mem) : "r"(0x100));
689a299895bSThomas Huth     report(*mem == 0x8501, "adc(1)");
6906cff92ddSAvi Kivity     asm("clc; sbb %1, %0" : "+m"(*mem) : "r"(0));
691a299895bSThomas Huth     report(*mem == 0x8501, "sbb(0)");
6926cff92ddSAvi Kivity     asm("stc; sbb %1, %0" : "+m"(*mem) : "r"(0));
693a299895bSThomas Huth     report(*mem == 0x8500, "sbb(1)");
6946cff92ddSAvi Kivity     asm("and %1, %0" : "+m"(*mem) : "r"(0xfe77));
695a299895bSThomas Huth     report(*mem == 0x8400, "and");
6966cff92ddSAvi Kivity     asm("test %1, %0" : "+m"(*mem) : "r"(0xf000));
697a299895bSThomas Huth     report(*mem == 0x8400, "test");
6986cff92ddSAvi Kivity }
6996cff92ddSAvi Kivity 
70070bdcadbSNadav Amit static void test_illegal_movbe(void)
70170bdcadbSNadav Amit {
7023af47210SMichal Luczaj 	unsigned int vector;
7033af47210SMichal Luczaj 
704badc98caSKrish Sadhukhan 	if (!this_cpu_has(X86_FEATURE_MOVBE)) {
7053af47210SMichal Luczaj 		report_skip("MOVBE unsupported by CPU");
70670bdcadbSNadav Amit 		return;
70770bdcadbSNadav Amit 	}
70870bdcadbSNadav Amit 
7093af47210SMichal Luczaj 	asm volatile(ASM_TRY("1f")
7103af47210SMichal Luczaj 		     ".byte 0x0f; .byte 0x38; .byte 0xf0; .byte 0xc0;\n\t"
7113af47210SMichal Luczaj 		     "1:"
7123af47210SMichal Luczaj 		     : : : "memory", "rax");
7133af47210SMichal Luczaj 
7143af47210SMichal Luczaj 	vector = exception_vector();
7153af47210SMichal Luczaj 	report(vector == UD_VECTOR,
7163af47210SMichal Luczaj 	       "Wanted #UD on MOVBE with /reg, got vector = %u", vector);
71770bdcadbSNadav Amit }
71870bdcadbSNadav Amit 
7197db17e21SThomas Huth int main(void)
7207d36db35SAvi Kivity {
7217d36db35SAvi Kivity 	void *mem;
722e5e76263SJacob Xu 	void *cross_mem;
7237d36db35SAvi Kivity 
724*bbdb7433SSean Christopherson 	if (!is_fep_available())
725*bbdb7433SSean Christopherson 		report_skip("Skipping tests that require forced emulation, "
726*bbdb7433SSean Christopherson 			    "use kvm.force_emulation_prefix=1 to enable");
727*bbdb7433SSean Christopherson 
7287d36db35SAvi Kivity 	setup_vm();
72945fdc228SPaolo Bonzini 
730ec278ce3SAvi Kivity 	mem = alloc_vpages(2);
731ec278ce3SAvi Kivity 	install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem);
732ec278ce3SAvi Kivity 	// install the page twice to test cross-page mmio
733ec278ce3SAvi Kivity 	install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem + 4096);
734e5e76263SJacob Xu 	cross_mem = vmap(virt_to_phys(alloc_pages(2)), 2 * PAGE_SIZE);
7357d36db35SAvi Kivity 
736215ad64cSSean Christopherson 	test_mov(mem);
7376cff92ddSAvi Kivity 	test_simplealu(mem);
7387d36db35SAvi Kivity 	test_cmps(mem);
73980a4ea7bSAvi Kivity 	test_scas(mem);
7404003963dSNadav Amit 	test_smsw(mem);
7417d36db35SAvi Kivity 	test_lmsw();
7427d36db35SAvi Kivity 	test_stringio();
7437d36db35SAvi Kivity 	test_incdecnotneg(mem);
744d4655eafSWei Yongjun 	test_btc(mem);
7452e16c7f6SWei Yongjun 	test_bsfbsr(mem);
74651d65a3cSAvi Kivity 	test_imul(mem);
747d7f3ee3cSAvi Kivity 	test_sse(mem);
748e5e76263SJacob Xu 	test_sse_exceptions(cross_mem);
749b212fcdaSAvi Kivity 	test_shld_shrd(mem);
75047c1461aSAvi Kivity 	//test_lgdt_lidt(mem);
7514425dba6SPeter Feiner 	//test_lldt(mem);
75258a9d81eSAvi Kivity 	test_ltr(mem);
7537d36db35SAvi Kivity 
7547948d4b6SSean Christopherson 	if (is_fep_available()) {
75545fdc228SPaolo Bonzini 		test_smsw_reg(mem);
75645fdc228SPaolo Bonzini 		test_nop(mem);
75745fdc228SPaolo Bonzini 		test_mov_dr(mem);
7580dcb3fbaSMichal Luczaj 		test_illegal_lea();
75945fdc228SPaolo Bonzini 	}
76045fdc228SPaolo Bonzini 
761ec278ce3SAvi Kivity 	test_crosspage_mmio(mem);
762ec278ce3SAvi Kivity 
763a19c7db7SXiao Guangrong 	test_string_io_mmio(mem);
76470bdcadbSNadav Amit 	test_illegal_movbe();
765f413c1afSNadav Amit 
766*bbdb7433SSean Christopherson #ifdef __x86_64__
767*bbdb7433SSean Christopherson 	test_emulator_64(mem);
768*bbdb7433SSean Christopherson #endif
769f3cdd159SJan Kiszka 	return report_summary();
7707d36db35SAvi Kivity }
771