#include <asm/debugreg.h>

#include "ioram.h"
#include "vm.h"
#include "libcflat.h"
#include "desc.h"
#include "processor.h"
#include "vmalloc.h"
#include "alloc_page.h"
#include "usermode.h"

#define TESTDEV_IO_PORT 0xe0

static int exceptions;

#ifdef __x86_64__
#include "emulator64.c"
#endif

static char st1[] = "abcdefghijklmnop";

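/*
 * Send st1 out the test device I/O port with REP OUTSB, first forward
 * (DF clear) and then backward (DF set), reading one byte back each time to
 * check which source byte was sent last.
 */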
static void test_stringio(void)
{
	unsigned char r = 0;
	asm volatile("cld \n\t"
		     "movw %0, %%dx \n\t"
		     "rep outsb \n\t"
		     : : "i"((short)TESTDEV_IO_PORT),
		       "S"(st1), "c"(sizeof(st1) - 1));
	asm volatile("inb %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT));
	report(r == st1[sizeof(st1) - 2], "outsb up"); /* last char */

	asm volatile("std \n\t"
		     "movw %0, %%dx \n\t"
		     "rep outsb \n\t"
		     : : "i"((short)TESTDEV_IO_PORT),
		       "S"(st1 + sizeof(st1) - 2), "c"(sizeof(st1) - 1));
	asm volatile("cld \n\t" : : );
	asm volatile("in %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT));
	report(r == st1[0], "outsb down");
}

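/*
 * Compare two 200-byte buffers with REPE CMPS at every operand size.  The
 * first set of runs stays inside the region where the buffers match and must
 * drive RCX to zero; the second set runs past offset 100, where the buffers
 * start to differ, and checks where RSI/RDI/RCX stop.
 */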
static void test_cmps_one(unsigned char *m1, unsigned char *m3)
{
	void *rsi, *rdi;
	long rcx, tmp;

	rsi = m1; rdi = m3; rcx = 30;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsb"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe/cmpsb (1)");

	rsi = m1; rdi = m3; rcx = 30;
	asm volatile("or $1, %[tmp]\n\t" // clear ZF
		     "repe cmpsb"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30,
	       "repe cmpsb (1.zf)");

	rsi = m1; rdi = m3; rcx = 15;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsw"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe cmpsw (1)");

	rsi = m1; rdi = m3; rcx = 7;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsl"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 28 && rdi == m3 + 28, "repe cmpsl (1)");

#ifdef __x86_64__
	rsi = m1; rdi = m3; rcx = 4;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsq"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 0 && rsi == m1 + 32 && rdi == m3 + 32, "repe cmpsq (1)");
#endif

	rsi = m1; rdi = m3; rcx = 130;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsb"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 29 && rsi == m1 + 101 && rdi == m3 + 101,
	       "repe cmpsb (2)");

	rsi = m1; rdi = m3; rcx = 65;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsw"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 14 && rsi == m1 + 102 && rdi == m3 + 102,
	       "repe cmpsw (2)");

	rsi = m1; rdi = m3; rcx = 32;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsl"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 6 && rsi == m1 + 104 && rdi == m3 + 104,
	       "repe cmpsl (2)");

#ifdef __x86_64__
	rsi = m1; rdi = m3; rcx = 16;
	asm volatile("xor %[tmp], %[tmp] \n\t"
		     "repe cmpsq"
		     : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp)
		     : : "cc");
	report(rcx == 3 && rsi == m1 + 104 && rdi == m3 + 104,
	       "repe cmpsq (2)");
#endif
}

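/*
 * Fill three buffers that agree for the first 100 bytes and diverge from
 * byte 100 onward, then run the REPE CMPS checks once against a stack copy
 * and once against a second buffer in the same test memory region.
 */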
static void test_cmps(void *mem)
{
	unsigned char *m1 = mem, *m2 = mem + 1024;
	unsigned char m3[1024];

	for (int i = 0; i < 100; ++i)
		m1[i] = m2[i] = m3[i] = i;
	for (int i = 100; i < 200; ++i)
		m1[i] = (m3[i] = m2[i] = i) + 1;
	test_cmps_one(m1, m3);
	test_cmps_one(m1, m2);
}

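/*
 * Store a known pattern and run SCASB/SCASW/SCASD (and SCASQ on 64-bit) with
 * accumulator values that match or mismatch only in the bytes the instruction
 * actually compares, checking both ZF and how far DI advances.
 */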
static void test_scas(void *mem)
{
	bool z;
	void *di;

	*(uint64_t *)mem = 0x77665544332211;

	di = mem;
	asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff11));
	report(di == mem + 1 && z, "scasb match");

	di = mem;
	asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff54));
	report(di == mem + 1 && !z, "scasb mismatch");

	di = mem;
	asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff2211));
	report(di == mem + 2 && z, "scasw match");

	di = mem;
	asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xffdd11));
	report(di == mem + 2 && !z, "scasw mismatch");

	di = mem;
	asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"((ulong)0xff44332211ul));
	report(di == mem + 4 && z, "scasd match");

	di = mem;
	asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"(0x45332211));
	report(di == mem + 4 && !z, "scasd mismatch");

#ifdef __x86_64__
	di = mem;
	asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(0x77665544332211ul));
	report(di == mem + 8 && z, "scasq match");

	di = mem;
	asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(3));
	report(di == mem + 8 && !z, "scasq mismatch");
#endif
}

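/*
 * INC/DEC with and without a LOCK prefix, plus LOCK NOT/NEG, on memory
 * operands of byte and full width.
 */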
static void test_incdecnotneg(void *mem)
{
	unsigned long *m = mem, v = 1234;
	unsigned char *mb = mem, vb = 66;

	*m = 0;

	asm volatile ("incl %0":"+m"(*m));
	report(*m == 1, "incl");
	asm volatile ("decl %0":"+m"(*m));
	report(*m == 0, "decl");
	asm volatile ("incb %0":"+m"(*m));
	report(*m == 1, "incb");
	asm volatile ("decb %0":"+m"(*m));
	report(*m == 0, "decb");

	asm volatile ("lock incl %0":"+m"(*m));
	report(*m == 1, "lock incl");
	asm volatile ("lock decl %0":"+m"(*m));
	report(*m == 0, "lock decl");
	asm volatile ("lock incb %0":"+m"(*m));
	report(*m == 1, "lock incb");
	asm volatile ("lock decb %0":"+m"(*m));
	report(*m == 0, "lock decb");

	*m = v;

#ifdef __x86_64__
	asm ("lock negq %0" : "+m"(*m)); v = -v;
	report(*m == v, "lock negq");
	asm ("lock notq %0" : "+m"(*m)); v = ~v;
	report(*m == v, "lock notq");
#endif

	*mb = vb;

	asm ("lock negb %0" : "+m"(*mb)); vb = -vb;
	report(*mb == vb, "lock negb");
	asm ("lock notb %0" : "+m"(*mb)); vb = ~vb;
	report(*mb == vb, "lock notb");
}

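/*
 * Read the machine status word with SMSW into a register, into a 16-bit slot
 * of a local buffer (verifying the neighbouring words stay zero), and into
 * h_mem in the emulated memory region, where only the low 16 bits may change.
 */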
static void test_smsw(unsigned long *h_mem)
{
	char mem[16];
	unsigned short msw, msw_orig, *pmsw;
	int i, zero;

	msw_orig = read_cr0();

	asm("smsw %0" : "=r"(msw));
	report(msw == msw_orig, "smsw (1)");

	memset(mem, 0, 16);
	pmsw = (void *)mem;
	asm("smsw %0" : "=m"(pmsw[4]));
	zero = 1;
	for (i = 0; i < 8; ++i)
		if (i != 4 && pmsw[i])
			zero = 0;
	report(msw == pmsw[4] && zero, "smsw (2)");

	/* Trigger exit on smsw */
	*h_mem = -1ul;
	asm volatile("smsw %0" : "+m"(*h_mem));
	report(msw == (unsigned short)*h_mem &&
	       (*h_mem & ~0xfffful) == (-1ul & ~0xfffful), "smsw (3)");
}

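/*
 * Load MSW values that toggle CR0.TS and CR0.EM and verify the effect via
 * read_cr0(), including that LMSW cannot clear CR0.PE; finally restore the
 * original CR0.
 */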
static void test_lmsw(void)
{
	char mem[16];
	unsigned short msw, *pmsw;
	unsigned long cr0;

	cr0 = read_cr0();

	msw = cr0 ^ 8;
	asm("lmsw %0" : : "r"(msw));
	printf("before %lx after %lx\n", cr0, read_cr0());
	report((cr0 ^ read_cr0()) == 8, "lmsw (1)");

	pmsw = (void *)mem;
	*pmsw = cr0;
	asm("lmsw %0" : : "m"(*pmsw));
	printf("before %lx after %lx\n", cr0, read_cr0());
	report(cr0 == read_cr0(), "lmsw (2)");

	/* lmsw can't clear cr0.pe */
	msw = (cr0 & ~1ul) ^ 4;  /* change EM to force trap */
	asm("lmsw %0" : : "r"(msw));
	report((cr0 ^ read_cr0()) == 4 && (cr0 & 1), "lmsw (3)");

	/* back to normal */
	msw = cr0;
	asm("lmsw %0" : : "r"(msw));
}

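/*
 * Flip bits with BTC using immediate offsets (masked to the operand size)
 * and register offsets (which may address bits outside the referenced dword,
 * including negative offsets), then verify exactly which dwords changed.
 */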
static void test_btc(void *mem)
{
	unsigned int *a = mem;

	memset(mem, 0, 4 * sizeof(unsigned int));

	asm ("btcl $32, %0" :: "m"(a[0]) : "memory");
	asm ("btcl $1, %0" :: "m"(a[1]) : "memory");
	asm ("btcl %1, %0" :: "m"(a[0]), "r"(66) : "memory");
	report(a[0] == 1 && a[1] == 2 && a[2] == 4, "btcl imm8, r/m");

	asm ("btcl %1, %0" :: "m"(a[3]), "r"(-1) : "memory");
	report(a[0] == 1 && a[1] == 2 && a[2] == 0x80000004, "btcl reg, r/m");

#ifdef __x86_64__
	asm ("btcq %1, %0" : : "m"(a[2]), "r"(-1l) : "memory");
	report(a[0] == 1 && a[1] == 0x80000002 && a[2] == 0x80000004 && a[3] == 0,
	       "btcq reg, r/m");
#endif
}

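/*
 * BSF and BSR on memory operands at 16/32-bit (and 64-bit) widths, checking
 * the returned bit index and, for a zero source, that ZF is set.
 */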
static void test_bsfbsr(void *mem)
{
	unsigned eax, *meml = mem;
	unsigned short ax, *memw = mem;
#ifdef __x86_64__
	unsigned long rax, *memq = mem;
	unsigned char z;
#endif

	*memw = 0xc000;
	asm("bsfw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw));
	report(ax == 14, "bsfw r/m, reg");

	*meml = 0xc0000000;
	asm("bsfl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml));
	report(eax == 30, "bsfl r/m, reg");

#ifdef __x86_64__
	*memq = 0xc00000000000;
	asm("bsfq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq));
	report(rax == 46, "bsfq r/m, reg");

	*memq = 0;
	asm("bsfq %[mem], %[a]; setz %[z]"
	    : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq));
	report(z == 1, "bsfq r/m, reg");
#endif

	*memw = 0xc000;
	asm("bsrw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw));
	report(ax == 15, "bsrw r/m, reg");

	*meml = 0xc0000000;
	asm("bsrl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml));
	report(eax == 31, "bsrl r/m, reg");

#ifdef __x86_64__
	*memq = 0xc00000000000;
	asm("bsrq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq));
	report(rax == 47, "bsrq r/m, reg");

	*memq = 0;
	asm("bsrq %[mem], %[a]; setz %[z]"
	    : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq));
	report(z == 1, "bsrq r/m, reg");
#endif
}

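/*
 * Two- and three-operand IMUL with a memory source at 16/32-bit (and 64-bit)
 * operand sizes, comparing the destination against precomputed products.
 */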
static void test_imul(uint64_t *mem)
{
	ulong a;

	*mem = 51; a = 0x1234567812345678ULL & -1ul;
	asm ("imulw %1, %%ax" : "+a"(a) : "m"(*mem));
	report(a == (0x12345678123439e8ULL & -1ul), "imul ax, mem");

	*mem = 51; a = 0x1234567812345678ULL & -1ul;
	asm ("imull %1, %%eax" : "+a"(a) : "m"(*mem));
	report(a == 0xa06d39e8, "imul eax, mem");

	*mem = 0x1234567812345678ULL; a = 0x8765432187654321ULL & -1ul;
	asm ("imulw $51, %1, %%ax" : "+a"(a) : "m"(*mem));
	report(a == (0x87654321876539e8ULL & -1ul), "imul ax, mem, imm8");

	*mem = 0x1234567812345678ULL;
	asm ("imull $51, %1, %%eax" : "+a"(a) : "m"(*mem));
	report(a == 0xa06d39e8, "imul eax, mem, imm8");

	*mem = 0x1234567812345678ULL; a = 0x8765432187654321ULL & -1ul;
	asm ("imulw $311, %1, %%ax" : "+a"(a) : "m"(*mem));
	report(a == (0x8765432187650bc8ULL & -1ul), "imul ax, mem, imm");

	*mem = 0x1234567812345678ULL;
	asm ("imull $311, %1, %%eax" : "+a"(a) : "m"(*mem));
	report(a == 0x1d950bc8, "imul eax, mem, imm");

#ifdef __x86_64__
	*mem = 51; a = 0x1234567812345678UL;
	asm ("imulq %1, %%rax" : "+a"(a) : "m"(*mem));
	report(a == 0xA06D39EBA06D39E8UL, "imul rax, mem");

	*mem = 0x1234567812345678UL;
	asm ("imulq $51, %1, %%rax" : "+a"(a) : "m"(*mem));
	report(a == 0xA06D39EBA06D39E8UL, "imul rax, mem, imm8");

	*mem = 0x1234567812345678UL;
	asm ("imulq $311, %1, %%rax" : "+a"(a) : "m"(*mem));
	report(a == 0x1D950BDE1D950BC8L, "imul rax, mem, imm");
#endif
}

typedef unsigned __attribute__((vector_size(16))) sse128;

static bool sseeq(uint32_t *v1, uint32_t *v2)
{
	bool ok = true;
	int i;

	for (i = 0; i < 4; ++i)
		ok &= v1[i] == v2[i];

	return ok;
}

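/*
 * With CR0.EM/TS cleared and CR4.OSFXSR set, copy a vector to and from the
 * test memory region with each SSE move instruction and compare both sides.
 */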
static __attribute__((target("sse2"))) void test_sse(uint32_t *mem)
{
	sse128 vv;
	uint32_t *v = (uint32_t *)&vv;

	write_cr0(read_cr0() & ~6); /* EM, TS */
	write_cr4(read_cr4() | 0x200); /* OSFXSR */
	memset(&vv, 0, sizeof(vv));

#define TEST_RW_SSE(insn) do { \
		v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; \
		asm(insn " %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); \
		report(sseeq(v, mem), insn " (read)"); \
		mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8; \
		asm(insn " %1, %0" : "=x"(vv) : "m"(*mem) : "memory"); \
		report(sseeq(v, mem), insn " (write)"); \
	} while (0)

	TEST_RW_SSE("movdqu");
	TEST_RW_SSE("movaps");
	TEST_RW_SSE("movapd");
	TEST_RW_SSE("movups");
	TEST_RW_SSE("movupd");
#undef TEST_RW_SSE
}

static void unaligned_movaps_handler(struct ex_regs *regs)
{
	extern char unaligned_movaps_cont;

	++exceptions;
	regs->rip = (ulong)&unaligned_movaps_cont;
}

static void cross_movups_handler(struct ex_regs *regs)
{
	extern char cross_movups_cont;

	++exceptions;
	regs->rip = (ulong)&cross_movups_cont;
}

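/*
 * Unaligned MOVUPS/MOVUPD accesses must succeed, an unaligned MOVAPS must
 * raise #GP, and a MOVUPS that crosses into an unmapped page must raise #PF;
 * the handlers above skip the faulting instruction and count the exceptions.
 */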
static __attribute__((target("sse2"))) void test_sse_exceptions(void *cross_mem)
{
	sse128 vv;
	uint32_t *v = (uint32_t *)&vv;
	uint32_t *mem;
	uint8_t *bytes = cross_mem; // aligned on PAGE_SIZE*2
	void *page2 = (void *)(&bytes[4096]);
	struct pte_search search;
	pteval_t orig_pte;
	handler old;

	// setup memory for unaligned access
	mem = (uint32_t *)(&bytes[8]);

	// test unaligned access for movups, movupd and movaps
	v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4;
	mem[0] = 5; mem[1] = 6; mem[2] = 8; mem[3] = 9;
	asm("movups %1, %0" : "=m"(*mem) : "x"(vv) : "memory");
	report(sseeq(v, mem), "movups unaligned");

	v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4;
	mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8;
	asm("movupd %1, %0" : "=m"(*mem) : "x"(vv) : "memory");
	report(sseeq(v, mem), "movupd unaligned");
	exceptions = 0;
	old = handle_exception(GP_VECTOR, unaligned_movaps_handler);
	asm("movaps %1, %0\n\t unaligned_movaps_cont:"
	    : "=m"(*mem) : "x"(vv));
	handle_exception(GP_VECTOR, old);
	report(exceptions == 1, "unaligned movaps exception");

	// setup memory for cross page access
	mem = (uint32_t *)(&bytes[4096-8]);
	v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4;
	mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8;

	asm("movups %1, %0" : "=m"(*mem) : "x"(vv) : "memory");
	report(sseeq(v, mem), "movups unaligned crosspage");

	// invalidate second page
	search = find_pte_level(current_page_table(), page2, 1);
	orig_pte = *search.pte;
	install_pte(current_page_table(), 1, page2, 0, NULL);
	invlpg(page2);

	exceptions = 0;
	old = handle_exception(PF_VECTOR, cross_movups_handler);
	asm("movups %1, %0\n\t cross_movups_cont:" : "=m"(*mem) : "x"(vv) :
	    "memory");
	handle_exception(PF_VECTOR, old);
	report(exceptions == 1, "movups crosspage exception");

	// restore invalidated page
	install_pte(current_page_table(), 1, page2, orig_pte, NULL);
}

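/* SHLD/SHRD by CL on a memory destination, shifting in bits from a register. */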
static void test_shld_shrd(u32 *mem)
{
	*mem = 0x12345678;
	asm("shld %2, %1, %0" : "+m"(*mem) : "r"(0xaaaaaaaaU), "c"((u8)3));
	report(*mem == ((0x12345678 << 3) | 5), "shld (cl)");
	*mem = 0x12345678;
	asm("shrd %2, %1, %0" : "+m"(*mem) : "r"(0x55555555U), "c"((u8)3));
	report(*mem == ((0x12345678 >> 3) | (5u << 29)), "shrd (cl)");
}

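/*
 * SMSW to a register via the forced-emulation prefix: the 16-bit form must
 * leave the upper bits of the destination untouched, while the 32-bit and
 * 64-bit forms replace the whole register.
 */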
static void test_smsw_reg(uint64_t *mem)
{
	unsigned long cr0 = read_cr0();
	unsigned long rax;
	const unsigned long in_rax = 0x1234567890abcdefull & -1ul;

	asm(KVM_FEP "smsww %w0\n\t" : "=a" (rax) : "0" (in_rax));
	report((u16)rax == (u16)cr0 && rax >> 16 == in_rax >> 16,
	       "16-bit smsw reg");

	asm(KVM_FEP "smswl %k0\n\t" : "=a" (rax) : "0" (in_rax));
	report(rax == (u32)cr0, "32-bit smsw reg");

#ifdef __x86_64__
	asm(KVM_FEP "smswq %q0\n\t" : "=a" (rax) : "0" (in_rax));
	report(rax == cr0, "64-bit smsw reg");
#endif
}

static void test_nop(uint64_t *mem)
{
	unsigned long rax;
	const unsigned long in_rax = 0x12345678ul;
	asm(KVM_FEP "nop\n\t" : "=a" (rax) : "0" (in_rax));
	report(rax == in_rax, "nop");
}

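/*
 * Move 0 into DR6 and back out through the emulator; the value read back
 * must be DR6_ACTIVE_LOW, minus the RTM bit on CPUs that support RTM.
 */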
static void test_mov_dr(uint64_t *mem)
{
	unsigned long rax;

	asm(KVM_FEP "mov %0, %%dr6\n\t"
	    KVM_FEP "mov %%dr6, %0\n\t" : "=a" (rax) : "a" (0));

	if (this_cpu_has(X86_FEATURE_RTM))
		report(rax == (DR6_ACTIVE_LOW & ~DR6_RTM), "mov_dr6");
	else
		report(rax == DR6_ACTIVE_LOW, "mov_dr6");
}

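/* LEA with a register-direct ModRM (no memory operand) must raise #UD. */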
static void test_illegal_lea(void)
{
	unsigned int vector;

	asm volatile (ASM_TRY_FEP("1f")
		      ".byte 0x8d; .byte 0xc0\n\t"
		      "1:"
		      : : : "memory", "eax");

	vector = exception_vector();
	report(vector == UD_VECTOR,
	       "Wanted #UD on LEA with /reg, got vector = %u", vector);
}

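/*
 * A 16-bit access that straddles the last byte of one MMIO page and the
 * first byte of the next must read and write both halves correctly.
 */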
static void test_crosspage_mmio(volatile uint8_t *mem)
{
	volatile uint16_t w, *pw;

	pw = (volatile uint16_t *)&mem[4095];
	mem[4095] = 0x99;
	mem[4096] = 0x77;
	asm volatile("mov %1, %0" : "=r"(w) : "m"(*pw) : "memory");
	report(w == 0x7799, "cross-page mmio read");
	asm volatile("mov %1, %0" : "=m"(*pw) : "r"((uint16_t)0x88aa));
	report(mem[4095] == 0xaa && mem[4096] == 0x88, "cross-page mmio write");
}

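/*
 * REP INSB into a buffer that crosses an MMIO page boundary; the final byte
 * must hold the value previously pushed out the test port.
 */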
static void test_string_io_mmio(volatile uint8_t *mem)
{
	/* Cross MMIO pages. */
	volatile uint8_t *mmio = mem + 4032;

	asm volatile("outw %%ax, %%dx \n\t" : : "a"(0x9999), "d"(TESTDEV_IO_PORT));

	asm volatile ("cld; rep insb" : : "d" (TESTDEV_IO_PORT), "D" (mmio), "c" (1024));

	report(mmio[1023] == 0x99, "string_io_mmio");
}

/* kvm doesn't allow lidt/lgdt from mmio, so the test is disabled */
#if 0
static void test_lgdt_lidt(volatile uint8_t *mem)
{
	struct descriptor_table_ptr orig, fresh = {};

	sgdt(&orig);
	*(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) {
		.limit = 0xf234,
		.base = 0x12345678abcd,
	};
	cli();
	asm volatile("lgdt %0" : : "m"(*(struct descriptor_table_ptr *)mem));
	sgdt(&fresh);
	lgdt(&orig);
	sti();
	report(orig.limit == fresh.limit && orig.base == fresh.base, "lgdt (long address)");

	sidt(&orig);
	*(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) {
		.limit = 0x432f,
		.base = 0xdbca87654321,
	};
	cli();
	asm volatile("lidt %0" : : "m"(*(struct descriptor_table_ptr *)mem));
	sidt(&fresh);
	lidt(&orig);
	sti();
	report(orig.limit == fresh.limit && orig.base == fresh.base, "lidt (long address)");
}
#endif

/* Broken emulation causes triple fault, which skips the other tests. */
#if 0
static void test_lldt(volatile uint16_t *mem)
{
	u64 gdt[] = { 0, /* null descriptor */
#ifdef __X86_64__
		      0, /* ldt descriptor is 16 bytes in long mode */
#endif
		      0x0000f82000000ffffull /* ldt descriptor */
	};
	struct descriptor_table_ptr gdt_ptr = { .limit = sizeof(gdt) - 1,
						.base = (ulong)&gdt };
	struct descriptor_table_ptr orig_gdt;

	cli();
	sgdt(&orig_gdt);
	lgdt(&gdt_ptr);
	*mem = 0x8;
	asm volatile("lldt %0" : : "m"(*mem));
	lgdt(&orig_gdt);
	sti();
	report(sldt() == *mem, "lldt");
}
#endif

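/*
 * Clear the busy bit in the current TSS descriptor, reload TR with LTR from
 * a memory operand, and verify the selector and the busy bit afterwards.
 */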
static void test_ltr(volatile uint16_t *mem)
{
	struct descriptor_table_ptr gdt_ptr;
	uint64_t *gdt, *trp;
	uint16_t tr = str();
	uint64_t busy_mask = (uint64_t)1 << 41;

	sgdt(&gdt_ptr);
	gdt = (uint64_t *)gdt_ptr.base;
	trp = &gdt[tr >> 3];
	*trp &= ~busy_mask;
	*mem = tr;
	asm volatile("ltr %0" : : "m"(*mem) : "memory");
	report(str() == tr && (*trp & busy_mask), "ltr");
}

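/* Basic MOV between a register and a memory operand in the test region. */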
static void test_mov(void *mem)
{
	unsigned long t1, t2;

	// test mov reg, r/m and mov r/m, reg
	t1 = 0x123456789abcdefull & -1ul;
	asm volatile("mov %[t1], (%[mem]) \n\t"
		     "mov (%[mem]), %[t2]"
		     : [t2]"=r"(t2)
		     : [t1]"r"(t1), [mem]"r"(mem)
		     : "memory");
	report(t2 == (0x123456789abcdefull & -1ul), "mov reg, r/m (1)");
}

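/*
 * Simple ALU ops (OR/ADD/XOR/SUB/ADC/SBB/AND/TEST) on a memory destination,
 * with the carry flag forced for the ADC/SBB cases.
 */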
static void test_simplealu(u32 *mem)
{
	*mem = 0x1234;
	asm("or %1, %0" : "+m"(*mem) : "r"(0x8001));
	report(*mem == 0x9235, "or");
	asm("add %1, %0" : "+m"(*mem) : "r"(2));
	report(*mem == 0x9237, "add");
	asm("xor %1, %0" : "+m"(*mem) : "r"(0x1111));
	report(*mem == 0x8326, "xor");
	asm("sub %1, %0" : "+m"(*mem) : "r"(0x26));
	report(*mem == 0x8300, "sub");
	asm("clc; adc %1, %0" : "+m"(*mem) : "r"(0x100));
	report(*mem == 0x8400, "adc(0)");
	asm("stc; adc %1, %0" : "+m"(*mem) : "r"(0x100));
	report(*mem == 0x8501, "adc(1)");
	asm("clc; sbb %1, %0" : "+m"(*mem) : "r"(0));
	report(*mem == 0x8501, "sbb(0)");
	asm("stc; sbb %1, %0" : "+m"(*mem) : "r"(0));
	report(*mem == 0x8500, "sbb(1)");
	asm("and %1, %0" : "+m"(*mem) : "r"(0xfe77));
	report(*mem == 0x8400, "and");
	asm("test %1, %0" : "+m"(*mem) : "r"(0xf000));
	report(*mem == 0x8400, "test");
}

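/* MOVBE with a register-direct ModRM must raise #UD (if MOVBE is supported). */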
static void test_illegal_movbe(void)
{
	unsigned int vector;

	if (!this_cpu_has(X86_FEATURE_MOVBE)) {
		report_skip("MOVBE unsupported by CPU");
		return;
	}

	asm volatile(ASM_TRY("1f")
		     ".byte 0x0f; .byte 0x38; .byte 0xf0; .byte 0xc0;\n\t"
		     "1:"
		     : : : "memory", "rax");

	vector = exception_vector();
	report(vector == UD_VECTOR,
	       "Wanted #UD on MOVBE with /reg, got vector = %u", vector);
}

#ifdef __x86_64__
#define RIP_RELATIVE "(%%rip)"
#else
#define RIP_RELATIVE ""
#endif

static void handle_db(struct ex_regs *regs)
{
	++exceptions;
	regs->rflags |= X86_EFLAGS_RF;
}

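/*
 * Arm a code breakpoint in DR0 on the instruction that follows a MOV SS (or
 * POP SS on 32-bit), with and without forced emulation of either instruction,
 * and check whether the resulting #DB is delivered or suppressed by MOV/POP SS
 * blocking.
 */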
static void test_mov_pop_ss_code_db(void)
{
	handler old_db_handler = handle_exception(DB_VECTOR, handle_db);
	bool fep_available = is_fep_available();
	/* On Intel, code #DBs are inhibited when MOV/POP SS blocking is active. */
	int nr_expected = is_intel() ? 0 : 1;

	write_dr7(DR7_FIXED_1 |
		  DR7_GLOBAL_ENABLE_DRx(0) |
		  DR7_EXECUTE_DRx(0) |
		  DR7_LEN_1_DRx(0));

#define MOV_POP_SS_DB(desc, fep1, fep2, insn, store_ss, load_ss)	\
({									\
	unsigned long r;						\
									\
	exceptions = 0;							\
	asm volatile("lea 1f " RIP_RELATIVE ", %0\n\t"			\
		     "mov %0, %%dr0\n\t"				\
		     store_ss						\
		     fep1 load_ss					\
		     fep2 "1: xor %0, %0\n\t"				\
		     "2:"						\
		     : "=r" (r)						\
		     :							\
		     : "memory");					\
	report(exceptions == nr_expected && !r,				\
	       desc ": #DB %s after " insn " SS",			\
	       nr_expected ? "occurred" : "suppressed");		\
})

#define MOV_SS_DB(desc, fep1, fep2)					\
	MOV_POP_SS_DB(desc, fep1, fep2, "MOV",				\
		      "mov %%ss, %0\n\t", "mov %0, %%ss\n\t")

	MOV_SS_DB("no fep", "", "");
	if (fep_available) {
		MOV_SS_DB("fep MOV-SS", KVM_FEP, "");
		MOV_SS_DB("fep XOR", "", KVM_FEP);
		MOV_SS_DB("fep MOV-SS/fep XOR", KVM_FEP, KVM_FEP);
	}

	/* PUSH/POP SS are invalid in 64-bit mode. */
#ifndef __x86_64__
#define POP_SS_DB(desc, fep1, fep2)					\
	MOV_POP_SS_DB(desc, fep1, fep2, "POP",				\
		      "push %%ss\n\t", "pop %%ss\n\t")

	POP_SS_DB("no fep", "", "");
	if (fep_available) {
		POP_SS_DB("fep POP-SS", KVM_FEP, "");
		POP_SS_DB("fep XOR", "", KVM_FEP);
		POP_SS_DB("fep POP-SS/fep XOR", KVM_FEP, KVM_FEP);
	}
#endif

	write_dr7(DR7_FIXED_1);

	handle_exception(DB_VECTOR, old_db_handler);
}

int main(void)
{
	void *mem;
	void *cross_mem;

	if (!is_fep_available())
		report_skip("Skipping tests that require forced emulation, "
			    "use kvm.force_emulation_prefix=1 to enable");
797bbdb7433SSean Christopherson
7987d36db35SAvi Kivity setup_vm();
79945fdc228SPaolo Bonzini
800ec278ce3SAvi Kivity mem = alloc_vpages(2);
801ec278ce3SAvi Kivity install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem);
802ec278ce3SAvi Kivity // install the page twice to test cross-page mmio
803ec278ce3SAvi Kivity install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem + 4096);
804e5e76263SJacob Xu cross_mem = vmap(virt_to_phys(alloc_pages(2)), 2 * PAGE_SIZE);
8057d36db35SAvi Kivity
806215ad64cSSean Christopherson test_mov(mem);
8076cff92ddSAvi Kivity test_simplealu(mem);
8087d36db35SAvi Kivity test_cmps(mem);
80980a4ea7bSAvi Kivity test_scas(mem);
8104003963dSNadav Amit test_smsw(mem);
8117d36db35SAvi Kivity test_lmsw();
8127d36db35SAvi Kivity test_stringio();
8137d36db35SAvi Kivity test_incdecnotneg(mem);
814d4655eafSWei Yongjun test_btc(mem);
8152e16c7f6SWei Yongjun test_bsfbsr(mem);
81651d65a3cSAvi Kivity test_imul(mem);
817d7f3ee3cSAvi Kivity test_sse(mem);
818e5e76263SJacob Xu test_sse_exceptions(cross_mem);
819b212fcdaSAvi Kivity test_shld_shrd(mem);
82047c1461aSAvi Kivity //test_lgdt_lidt(mem);
8214425dba6SPeter Feiner //test_lldt(mem);
82258a9d81eSAvi Kivity test_ltr(mem);
8237d36db35SAvi Kivity
8247948d4b6SSean Christopherson if (is_fep_available()) {
82545fdc228SPaolo Bonzini test_smsw_reg(mem);
82645fdc228SPaolo Bonzini test_nop(mem);
82745fdc228SPaolo Bonzini test_mov_dr(mem);
8280dcb3fbaSMichal Luczaj test_illegal_lea();
82945fdc228SPaolo Bonzini }
83045fdc228SPaolo Bonzini
831ec278ce3SAvi Kivity test_crosspage_mmio(mem);
832ec278ce3SAvi Kivity
833a19c7db7SXiao Guangrong test_string_io_mmio(mem);
83470bdcadbSNadav Amit test_illegal_movbe();
835c4d38af0SMichal Luczaj test_mov_pop_ss_code_db();
836f413c1afSNadav Amit
837bbdb7433SSean Christopherson #ifdef __x86_64__
838bbdb7433SSean Christopherson test_emulator_64(mem);
839bbdb7433SSean Christopherson #endif
840f3cdd159SJan Kiszka return report_summary();
8417d36db35SAvi Kivity }