1f1dcfd54SSean Christopherson #include <asm/debugreg.h> 2f1dcfd54SSean Christopherson 37d36db35SAvi Kivity #include "ioram.h" 47d36db35SAvi Kivity #include "vm.h" 57d36db35SAvi Kivity #include "libcflat.h" 6e7c37968SGleb Natapov #include "desc.h" 7d7143f32SAvi Kivity #include "types.h" 8b39a3e14SNadav Amit #include "processor.h" 9efd8e5aaSPaolo Bonzini #include "vmalloc.h" 105aca024eSPaolo Bonzini #include "alloc_page.h" 113ee1b91bSBin Meng #include "usermode.h" 127d36db35SAvi Kivity 137d36db35SAvi Kivity #define TESTDEV_IO_PORT 0xe0 147d36db35SAvi Kivity 15d7143f32SAvi Kivity static int exceptions; 16d7143f32SAvi Kivity 17bbdb7433SSean Christopherson #ifdef __x86_64__ 18bbdb7433SSean Christopherson #include "emulator64.c" 19bbdb7433SSean Christopherson #endif 20bbdb7433SSean Christopherson 217d36db35SAvi Kivity static char st1[] = "abcdefghijklmnop"; 227d36db35SAvi Kivity 237db17e21SThomas Huth static void test_stringio(void) 247d36db35SAvi Kivity { 257d36db35SAvi Kivity unsigned char r = 0; 267d36db35SAvi Kivity asm volatile("cld \n\t" 277d36db35SAvi Kivity "movw %0, %%dx \n\t" 287d36db35SAvi Kivity "rep outsb \n\t" 297d36db35SAvi Kivity : : "i"((short)TESTDEV_IO_PORT), 307d36db35SAvi Kivity "S"(st1), "c"(sizeof(st1) - 1)); 317d36db35SAvi Kivity asm volatile("inb %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT)); 32a299895bSThomas Huth report(r == st1[sizeof(st1) - 2], "outsb up"); /* last char */ 337d36db35SAvi Kivity 347d36db35SAvi Kivity asm volatile("std \n\t" 357d36db35SAvi Kivity "movw %0, %%dx \n\t" 367d36db35SAvi Kivity "rep outsb \n\t" 377d36db35SAvi Kivity : : "i"((short)TESTDEV_IO_PORT), 387d36db35SAvi Kivity "S"(st1 + sizeof(st1) - 2), "c"(sizeof(st1) - 1)); 397d36db35SAvi Kivity asm volatile("cld \n\t" : : ); 407d36db35SAvi Kivity asm volatile("in %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT)); 41a299895bSThomas Huth report(r == st1[0], "outsb down"); 427d36db35SAvi Kivity } 437d36db35SAvi Kivity 44db4898e8SThomas Huth static void test_cmps_one(unsigned char *m1, unsigned char *m3) 457d36db35SAvi Kivity { 467d36db35SAvi Kivity void *rsi, *rdi; 477d36db35SAvi Kivity long rcx, tmp; 487d36db35SAvi Kivity 497d36db35SAvi Kivity rsi = m1; rdi = m3; rcx = 30; 507d36db35SAvi Kivity asm volatile("xor %[tmp], %[tmp] \n\t" 512d331a4dSRoman Bolshakov "repe cmpsb" 527d36db35SAvi Kivity : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 537d36db35SAvi Kivity : : "cc"); 54a299895bSThomas Huth report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe/cmpsb (1)"); 557d36db35SAvi Kivity 5651ba4180SAvi Kivity rsi = m1; rdi = m3; rcx = 30; 5751ba4180SAvi Kivity asm volatile("or $1, %[tmp]\n\t" // clear ZF 582d331a4dSRoman Bolshakov "repe cmpsb" 5951ba4180SAvi Kivity : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 6051ba4180SAvi Kivity : : "cc"); 61a299895bSThomas Huth report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, 622d331a4dSRoman Bolshakov "repe cmpsb (1.zf)"); 6351ba4180SAvi Kivity 647d36db35SAvi Kivity rsi = m1; rdi = m3; rcx = 15; 657d36db35SAvi Kivity asm volatile("xor %[tmp], %[tmp] \n\t" 662d331a4dSRoman Bolshakov "repe cmpsw" 677d36db35SAvi Kivity : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 687d36db35SAvi Kivity : : "cc"); 692d331a4dSRoman Bolshakov report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe cmpsw (1)"); 707d36db35SAvi Kivity 717d36db35SAvi Kivity rsi = m1; rdi = m3; rcx = 7; 727d36db35SAvi Kivity asm volatile("xor %[tmp], %[tmp] \n\t" 732d331a4dSRoman Bolshakov "repe cmpsl" 747d36db35SAvi Kivity : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 757d36db35SAvi Kivity : : "cc"); 762d331a4dSRoman Bolshakov report(rcx == 0 && rsi == m1 + 28 && rdi == m3 + 28, "repe cmpll (1)"); 777d36db35SAvi Kivity 78bbdb7433SSean Christopherson #ifdef __x86_64__ 797d36db35SAvi Kivity rsi = m1; rdi = m3; rcx = 4; 807d36db35SAvi Kivity asm volatile("xor %[tmp], %[tmp] \n\t" 812d331a4dSRoman Bolshakov "repe cmpsq" 827d36db35SAvi Kivity : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 837d36db35SAvi Kivity : : "cc"); 842d331a4dSRoman Bolshakov report(rcx == 0 && rsi == m1 + 32 && rdi == m3 + 32, "repe cmpsq (1)"); 85bbdb7433SSean Christopherson #endif 867d36db35SAvi Kivity 877d36db35SAvi Kivity rsi = m1; rdi = m3; rcx = 130; 887d36db35SAvi Kivity asm volatile("xor %[tmp], %[tmp] \n\t" 892d331a4dSRoman Bolshakov "repe cmpsb" 907d36db35SAvi Kivity : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 917d36db35SAvi Kivity : : "cc"); 92a299895bSThomas Huth report(rcx == 29 && rsi == m1 + 101 && rdi == m3 + 101, 932d331a4dSRoman Bolshakov "repe cmpsb (2)"); 947d36db35SAvi Kivity 957d36db35SAvi Kivity rsi = m1; rdi = m3; rcx = 65; 967d36db35SAvi Kivity asm volatile("xor %[tmp], %[tmp] \n\t" 972d331a4dSRoman Bolshakov "repe cmpsw" 987d36db35SAvi Kivity : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 997d36db35SAvi Kivity : : "cc"); 100a299895bSThomas Huth report(rcx == 14 && rsi == m1 + 102 && rdi == m3 + 102, 1012d331a4dSRoman Bolshakov "repe cmpsw (2)"); 1027d36db35SAvi Kivity 1037d36db35SAvi Kivity rsi = m1; rdi = m3; rcx = 32; 1047d36db35SAvi Kivity asm volatile("xor %[tmp], %[tmp] \n\t" 1052d331a4dSRoman Bolshakov "repe cmpsl" 1067d36db35SAvi Kivity : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 1077d36db35SAvi Kivity : : "cc"); 108a299895bSThomas Huth report(rcx == 6 && rsi == m1 + 104 && rdi == m3 + 104, 1092d331a4dSRoman Bolshakov "repe cmpll (2)"); 1107d36db35SAvi Kivity 111bbdb7433SSean Christopherson #ifdef __x86_64__ 1127d36db35SAvi Kivity rsi = m1; rdi = m3; rcx = 16; 1137d36db35SAvi Kivity asm volatile("xor %[tmp], %[tmp] \n\t" 1142d331a4dSRoman Bolshakov "repe cmpsq" 1157d36db35SAvi Kivity : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 1167d36db35SAvi Kivity : : "cc"); 117a299895bSThomas Huth report(rcx == 3 && rsi == m1 + 104 && rdi == m3 + 104, 1182d331a4dSRoman Bolshakov "repe cmpsq (2)"); 119bbdb7433SSean Christopherson #endif 1207d36db35SAvi Kivity } 1217d36db35SAvi Kivity 122db4898e8SThomas Huth static void test_cmps(void *mem) 1237d36db35SAvi Kivity { 1247d36db35SAvi Kivity unsigned char *m1 = mem, *m2 = mem + 1024; 1257d36db35SAvi Kivity unsigned char m3[1024]; 1267d36db35SAvi Kivity 1277d36db35SAvi Kivity for (int i = 0; i < 100; ++i) 1287d36db35SAvi Kivity m1[i] = m2[i] = m3[i] = i; 1297d36db35SAvi Kivity for (int i = 100; i < 200; ++i) 1307d36db35SAvi Kivity m1[i] = (m3[i] = m2[i] = i) + 1; 1317d36db35SAvi Kivity test_cmps_one(m1, m3); 1327d36db35SAvi Kivity test_cmps_one(m1, m2); 1337d36db35SAvi Kivity } 1347d36db35SAvi Kivity 135db4898e8SThomas Huth static void test_scas(void *mem) 13680a4ea7bSAvi Kivity { 13780a4ea7bSAvi Kivity bool z; 13880a4ea7bSAvi Kivity void *di; 13980a4ea7bSAvi Kivity 140bbdb7433SSean Christopherson *(uint64_t *)mem = 0x77665544332211; 14180a4ea7bSAvi Kivity 14280a4ea7bSAvi Kivity di = mem; 14380a4ea7bSAvi Kivity asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff11)); 144a299895bSThomas Huth report(di == mem + 1 && z, "scasb match"); 14580a4ea7bSAvi Kivity 14680a4ea7bSAvi Kivity di = mem; 14780a4ea7bSAvi Kivity asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff54)); 148a299895bSThomas Huth report(di == mem + 1 && !z, "scasb mismatch"); 14980a4ea7bSAvi Kivity 15080a4ea7bSAvi Kivity di = mem; 15180a4ea7bSAvi Kivity asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff2211)); 152a299895bSThomas Huth report(di == mem + 2 && z, "scasw match"); 15380a4ea7bSAvi Kivity 15480a4ea7bSAvi Kivity di = mem; 15580a4ea7bSAvi Kivity asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xffdd11)); 156a299895bSThomas Huth report(di == mem + 2 && !z, "scasw mismatch"); 15780a4ea7bSAvi Kivity 15880a4ea7bSAvi Kivity di = mem; 159bbdb7433SSean Christopherson asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"((ulong)0xff44332211ul)); 160a299895bSThomas Huth report(di == mem + 4 && z, "scasd match"); 16180a4ea7bSAvi Kivity 16280a4ea7bSAvi Kivity di = mem; 16380a4ea7bSAvi Kivity asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"(0x45332211)); 164a299895bSThomas Huth report(di == mem + 4 && !z, "scasd mismatch"); 16580a4ea7bSAvi Kivity 166bbdb7433SSean Christopherson #ifdef __x86_64__ 16780a4ea7bSAvi Kivity di = mem; 16880a4ea7bSAvi Kivity asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(0x77665544332211ul)); 169a299895bSThomas Huth report(di == mem + 8 && z, "scasq match"); 17080a4ea7bSAvi Kivity 17180a4ea7bSAvi Kivity di = mem; 17280a4ea7bSAvi Kivity asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(3)); 173a299895bSThomas Huth report(di == mem + 8 && !z, "scasq mismatch"); 174bbdb7433SSean Christopherson #endif 1757d36db35SAvi Kivity } 1767d36db35SAvi Kivity 177db4898e8SThomas Huth static void test_incdecnotneg(void *mem) 1787d36db35SAvi Kivity { 1797d36db35SAvi Kivity unsigned long *m = mem, v = 1234; 1807d36db35SAvi Kivity unsigned char *mb = mem, vb = 66; 1817d36db35SAvi Kivity 1827d36db35SAvi Kivity *m = 0; 1837d36db35SAvi Kivity 1847d36db35SAvi Kivity asm volatile ("incl %0":"+m"(*m)); 185a299895bSThomas Huth report(*m == 1, "incl"); 1867d36db35SAvi Kivity asm volatile ("decl %0":"+m"(*m)); 187a299895bSThomas Huth report(*m == 0, "decl"); 1887d36db35SAvi Kivity asm volatile ("incb %0":"+m"(*m)); 189a299895bSThomas Huth report(*m == 1, "incb"); 1907d36db35SAvi Kivity asm volatile ("decb %0":"+m"(*m)); 191a299895bSThomas Huth report(*m == 0, "decb"); 1927d36db35SAvi Kivity 1937d36db35SAvi Kivity asm volatile ("lock incl %0":"+m"(*m)); 194a299895bSThomas Huth report(*m == 1, "lock incl"); 1957d36db35SAvi Kivity asm volatile ("lock decl %0":"+m"(*m)); 196a299895bSThomas Huth report(*m == 0, "lock decl"); 1977d36db35SAvi Kivity asm volatile ("lock incb %0":"+m"(*m)); 198a299895bSThomas Huth report(*m == 1, "lock incb"); 1997d36db35SAvi Kivity asm volatile ("lock decb %0":"+m"(*m)); 200a299895bSThomas Huth report(*m == 0, "lock decb"); 2017d36db35SAvi Kivity 2027d36db35SAvi Kivity *m = v; 2037d36db35SAvi Kivity 204bbdb7433SSean Christopherson #ifdef __x86_64__ 2057d36db35SAvi Kivity asm ("lock negq %0" : "+m"(*m)); v = -v; 206a299895bSThomas Huth report(*m == v, "lock negl"); 2077d36db35SAvi Kivity asm ("lock notq %0" : "+m"(*m)); v = ~v; 208a299895bSThomas Huth report(*m == v, "lock notl"); 209bbdb7433SSean Christopherson #endif 2107d36db35SAvi Kivity 2117d36db35SAvi Kivity *mb = vb; 2127d36db35SAvi Kivity 2137d36db35SAvi Kivity asm ("lock negb %0" : "+m"(*mb)); vb = -vb; 214a299895bSThomas Huth report(*mb == vb, "lock negb"); 2157d36db35SAvi Kivity asm ("lock notb %0" : "+m"(*mb)); vb = ~vb; 216a299895bSThomas Huth report(*mb == vb, "lock notb"); 2177d36db35SAvi Kivity } 2187d36db35SAvi Kivity 219bbdb7433SSean Christopherson static void test_smsw(unsigned long *h_mem) 2207d36db35SAvi Kivity { 2217d36db35SAvi Kivity char mem[16]; 2227d36db35SAvi Kivity unsigned short msw, msw_orig, *pmsw; 2237d36db35SAvi Kivity int i, zero; 2247d36db35SAvi Kivity 2257d36db35SAvi Kivity msw_orig = read_cr0(); 2267d36db35SAvi Kivity 2277d36db35SAvi Kivity asm("smsw %0" : "=r"(msw)); 228a299895bSThomas Huth report(msw == msw_orig, "smsw (1)"); 2297d36db35SAvi Kivity 2307d36db35SAvi Kivity memset(mem, 0, 16); 2317d36db35SAvi Kivity pmsw = (void *)mem; 2327d36db35SAvi Kivity asm("smsw %0" : "=m"(pmsw[4])); 2337d36db35SAvi Kivity zero = 1; 2347d36db35SAvi Kivity for (i = 0; i < 8; ++i) 2357d36db35SAvi Kivity if (i != 4 && pmsw[i]) 2367d36db35SAvi Kivity zero = 0; 237a299895bSThomas Huth report(msw == pmsw[4] && zero, "smsw (2)"); 2384003963dSNadav Amit 2394003963dSNadav Amit /* Trigger exit on smsw */ 240bbdb7433SSean Christopherson *h_mem = -1ul; 24111147080SChris J Arges asm volatile("smsw %0" : "+m"(*h_mem)); 242a299895bSThomas Huth report(msw == (unsigned short)*h_mem && 243bbdb7433SSean Christopherson (*h_mem & ~0xfffful) == (-1ul & ~0xfffful), "smsw (3)"); 2447d36db35SAvi Kivity } 2457d36db35SAvi Kivity 246db4898e8SThomas Huth static void test_lmsw(void) 2477d36db35SAvi Kivity { 2487d36db35SAvi Kivity char mem[16]; 2497d36db35SAvi Kivity unsigned short msw, *pmsw; 2507d36db35SAvi Kivity unsigned long cr0; 2517d36db35SAvi Kivity 2527d36db35SAvi Kivity cr0 = read_cr0(); 2537d36db35SAvi Kivity 2547d36db35SAvi Kivity msw = cr0 ^ 8; 2557d36db35SAvi Kivity asm("lmsw %0" : : "r"(msw)); 2567d36db35SAvi Kivity printf("before %lx after %lx\n", cr0, read_cr0()); 257a299895bSThomas Huth report((cr0 ^ read_cr0()) == 8, "lmsw (1)"); 2587d36db35SAvi Kivity 2597d36db35SAvi Kivity pmsw = (void *)mem; 2607d36db35SAvi Kivity *pmsw = cr0; 2617d36db35SAvi Kivity asm("lmsw %0" : : "m"(*pmsw)); 2627d36db35SAvi Kivity printf("before %lx after %lx\n", cr0, read_cr0()); 263a299895bSThomas Huth report(cr0 == read_cr0(), "lmsw (2)"); 2647d36db35SAvi Kivity 2657d36db35SAvi Kivity /* lmsw can't clear cr0.pe */ 2667d36db35SAvi Kivity msw = (cr0 & ~1ul) ^ 4; /* change EM to force trap */ 2677d36db35SAvi Kivity asm("lmsw %0" : : "r"(msw)); 268a299895bSThomas Huth report((cr0 ^ read_cr0()) == 4 && (cr0 & 1), "lmsw (3)"); 2697d36db35SAvi Kivity 2707d36db35SAvi Kivity /* back to normal */ 2717d36db35SAvi Kivity msw = cr0; 2727d36db35SAvi Kivity asm("lmsw %0" : : "r"(msw)); 2737d36db35SAvi Kivity } 2747d36db35SAvi Kivity 275db4898e8SThomas Huth static void test_btc(void *mem) 276d4655eafSWei Yongjun { 277d4655eafSWei Yongjun unsigned int *a = mem; 278d4655eafSWei Yongjun 2797e083f20SNadav Amit memset(mem, 0, 4 * sizeof(unsigned int)); 280d4655eafSWei Yongjun 281d4655eafSWei Yongjun asm ("btcl $32, %0" :: "m"(a[0]) : "memory"); 282d4655eafSWei Yongjun asm ("btcl $1, %0" :: "m"(a[1]) : "memory"); 283d4655eafSWei Yongjun asm ("btcl %1, %0" :: "m"(a[0]), "r"(66) : "memory"); 284a299895bSThomas Huth report(a[0] == 1 && a[1] == 2 && a[2] == 4, "btcl imm8, r/m"); 285d4655eafSWei Yongjun 286d4655eafSWei Yongjun asm ("btcl %1, %0" :: "m"(a[3]), "r"(-1) : "memory"); 287a299895bSThomas Huth report(a[0] == 1 && a[1] == 2 && a[2] == 0x80000004, "btcl reg, r/m"); 2887e083f20SNadav Amit 289bbdb7433SSean Christopherson #ifdef __x86_64__ 2907e083f20SNadav Amit asm ("btcq %1, %0" : : "m"(a[2]), "r"(-1l) : "memory"); 291a299895bSThomas Huth report(a[0] == 1 && a[1] == 0x80000002 && a[2] == 0x80000004 && a[3] == 0, 292a299895bSThomas Huth "btcq reg, r/m"); 293bbdb7433SSean Christopherson #endif 294d4655eafSWei Yongjun } 295d4655eafSWei Yongjun 296db4898e8SThomas Huth static void test_bsfbsr(void *mem) 2972e16c7f6SWei Yongjun { 298554de466SAvi Kivity unsigned eax, *meml = mem; 299554de466SAvi Kivity unsigned short ax, *memw = mem; 300bbdb7433SSean Christopherson #ifdef __x86_64__ 301bbdb7433SSean Christopherson unsigned long rax, *memq = mem; 302554de466SAvi Kivity unsigned char z; 303bbdb7433SSean Christopherson #endif 3042e16c7f6SWei Yongjun 305554de466SAvi Kivity *memw = 0xc000; 306554de466SAvi Kivity asm("bsfw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw)); 307a299895bSThomas Huth report(ax == 14, "bsfw r/m, reg"); 3082e16c7f6SWei Yongjun 309554de466SAvi Kivity *meml = 0xc0000000; 310554de466SAvi Kivity asm("bsfl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml)); 311a299895bSThomas Huth report(eax == 30, "bsfl r/m, reg"); 3122e16c7f6SWei Yongjun 313bbdb7433SSean Christopherson #ifdef __x86_64__ 314554de466SAvi Kivity *memq = 0xc00000000000; 315554de466SAvi Kivity asm("bsfq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq)); 316a299895bSThomas Huth report(rax == 46, "bsfq r/m, reg"); 3172e16c7f6SWei Yongjun 318554de466SAvi Kivity *memq = 0; 319554de466SAvi Kivity asm("bsfq %[mem], %[a]; setz %[z]" 320554de466SAvi Kivity : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq)); 321a299895bSThomas Huth report(z == 1, "bsfq r/m, reg"); 322bbdb7433SSean Christopherson #endif 3232e16c7f6SWei Yongjun 324554de466SAvi Kivity *memw = 0xc000; 325554de466SAvi Kivity asm("bsrw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw)); 326a299895bSThomas Huth report(ax == 15, "bsrw r/m, reg"); 3272e16c7f6SWei Yongjun 328554de466SAvi Kivity *meml = 0xc0000000; 329554de466SAvi Kivity asm("bsrl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml)); 330a299895bSThomas Huth report(eax == 31, "bsrl r/m, reg"); 3312e16c7f6SWei Yongjun 332bbdb7433SSean Christopherson #ifdef __x86_64__ 333554de466SAvi Kivity *memq = 0xc00000000000; 334554de466SAvi Kivity asm("bsrq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq)); 335a299895bSThomas Huth report(rax == 47, "bsrq r/m, reg"); 3362e16c7f6SWei Yongjun 337554de466SAvi Kivity *memq = 0; 338554de466SAvi Kivity asm("bsrq %[mem], %[a]; setz %[z]" 339554de466SAvi Kivity : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq)); 340a299895bSThomas Huth report(z == 1, "bsrq r/m, reg"); 341bbdb7433SSean Christopherson #endif 3422e16c7f6SWei Yongjun } 3432e16c7f6SWei Yongjun 344bbdb7433SSean Christopherson static void test_imul(uint64_t *mem) 34551d65a3cSAvi Kivity { 34651d65a3cSAvi Kivity ulong a; 34751d65a3cSAvi Kivity 348bbdb7433SSean Christopherson *mem = 51; a = 0x1234567812345678ULL & -1ul;; 34951d65a3cSAvi Kivity asm ("imulw %1, %%ax" : "+a"(a) : "m"(*mem)); 350bbdb7433SSean Christopherson report(a == (0x12345678123439e8ULL & -1ul), "imul ax, mem"); 35151d65a3cSAvi Kivity 352bbdb7433SSean Christopherson *mem = 51; a = 0x1234567812345678ULL & -1ul;; 35351d65a3cSAvi Kivity asm ("imull %1, %%eax" : "+a"(a) : "m"(*mem)); 354a299895bSThomas Huth report(a == 0xa06d39e8, "imul eax, mem"); 35551d65a3cSAvi Kivity 356bbdb7433SSean Christopherson *mem = 0x1234567812345678ULL; a = 0x8765432187654321ULL & -1ul; 357bbdb7433SSean Christopherson asm ("imulw $51, %1, %%ax" : "+a"(a) : "m"(*mem)); 358bbdb7433SSean Christopherson report(a == (0x87654321876539e8ULL & -1ul), "imul ax, mem, imm8"); 359bbdb7433SSean Christopherson 360bbdb7433SSean Christopherson *mem = 0x1234567812345678ULL; 361bbdb7433SSean Christopherson asm ("imull $51, %1, %%eax" : "+a"(a) : "m"(*mem)); 362bbdb7433SSean Christopherson report(a == 0xa06d39e8, "imul eax, mem, imm8"); 363bbdb7433SSean Christopherson 364bbdb7433SSean Christopherson *mem = 0x1234567812345678ULL; a = 0x8765432187654321ULL & -1ul; 365bbdb7433SSean Christopherson asm ("imulw $311, %1, %%ax" : "+a"(a) : "m"(*mem)); 366bbdb7433SSean Christopherson report(a == (0x8765432187650bc8ULL & -1ul), "imul ax, mem, imm"); 367bbdb7433SSean Christopherson 368bbdb7433SSean Christopherson *mem = 0x1234567812345678ULL; 369bbdb7433SSean Christopherson asm ("imull $311, %1, %%eax" : "+a"(a) : "m"(*mem)); 370bbdb7433SSean Christopherson report(a == 0x1d950bc8, "imul eax, mem, imm"); 371bbdb7433SSean Christopherson 372bbdb7433SSean Christopherson #ifdef __x86_64__ 37351d65a3cSAvi Kivity *mem = 51; a = 0x1234567812345678UL; 37451d65a3cSAvi Kivity asm ("imulq %1, %%rax" : "+a"(a) : "m"(*mem)); 375a299895bSThomas Huth report(a == 0xA06D39EBA06D39E8UL, "imul rax, mem"); 37651d65a3cSAvi Kivity 37751d65a3cSAvi Kivity *mem = 0x1234567812345678UL; 37851d65a3cSAvi Kivity asm ("imulq $51, %1, %%rax" : "+a"(a) : "m"(*mem)); 379a299895bSThomas Huth report(a == 0xA06D39EBA06D39E8UL, "imul rax, mem, imm8"); 38051d65a3cSAvi Kivity 38151d65a3cSAvi Kivity *mem = 0x1234567812345678UL; 38251d65a3cSAvi Kivity asm ("imulq $311, %1, %%rax" : "+a"(a) : "m"(*mem)); 383a299895bSThomas Huth report(a == 0x1D950BDE1D950BC8L, "imul rax, mem, imm"); 384bbdb7433SSean Christopherson #endif 38551d65a3cSAvi Kivity } 386d7f3ee3cSAvi Kivity typedef unsigned __attribute__((vector_size(16))) sse128; 387d7f3ee3cSAvi Kivity 38893a3ae40SJacob Xu static bool sseeq(uint32_t *v1, uint32_t *v2) 389d7f3ee3cSAvi Kivity { 390d7f3ee3cSAvi Kivity bool ok = true; 391d7f3ee3cSAvi Kivity int i; 392d7f3ee3cSAvi Kivity 393*f485d340SSean Christopherson for (i = 0; i < 4; ++i) 39493a3ae40SJacob Xu ok &= v1[i] == v2[i]; 395d7f3ee3cSAvi Kivity 396d7f3ee3cSAvi Kivity return ok; 397d7f3ee3cSAvi Kivity } 398d7f3ee3cSAvi Kivity 39993a3ae40SJacob Xu static __attribute__((target("sse2"))) void test_sse(uint32_t *mem) 400d7f3ee3cSAvi Kivity { 40193a3ae40SJacob Xu sse128 vv; 40293a3ae40SJacob Xu uint32_t *v = (uint32_t *)&vv; 403d7f3ee3cSAvi Kivity 404d7f3ee3cSAvi Kivity write_cr0(read_cr0() & ~6); /* EM, TS */ 405d7f3ee3cSAvi Kivity write_cr4(read_cr4() | 0x200); /* OSFXSR */ 40693a3ae40SJacob Xu memset(&vv, 0, sizeof(vv)); 407290ed5d5SIgor Mammedov 4088726f977SJacob Xu #define TEST_RW_SSE(insn) do { \ 40993a3ae40SJacob Xu v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; \ 41093a3ae40SJacob Xu asm(insn " %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); \ 41193a3ae40SJacob Xu report(sseeq(v, mem), insn " (read)"); \ 41293a3ae40SJacob Xu mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8; \ 41393a3ae40SJacob Xu asm(insn " %1, %0" : "=x"(vv) : "m"(*mem) : "memory"); \ 41493a3ae40SJacob Xu report(sseeq(v, mem), insn " (write)"); \ 4158726f977SJacob Xu } while (0) 416f068a46aSIgor Mammedov 4178726f977SJacob Xu TEST_RW_SSE("movdqu"); 4188726f977SJacob Xu TEST_RW_SSE("movaps"); 4198726f977SJacob Xu TEST_RW_SSE("movapd"); 4208726f977SJacob Xu TEST_RW_SSE("movups"); 4218726f977SJacob Xu TEST_RW_SSE("movupd"); 4228726f977SJacob Xu #undef TEST_RW_SSE 423d7f3ee3cSAvi Kivity } 424d7f3ee3cSAvi Kivity 425e5e76263SJacob Xu static void unaligned_movaps_handler(struct ex_regs *regs) 426e5e76263SJacob Xu { 427e5e76263SJacob Xu extern char unaligned_movaps_cont; 428e5e76263SJacob Xu 429e5e76263SJacob Xu ++exceptions; 430e5e76263SJacob Xu regs->rip = (ulong)&unaligned_movaps_cont; 431e5e76263SJacob Xu } 432e5e76263SJacob Xu 433e5e76263SJacob Xu static void cross_movups_handler(struct ex_regs *regs) 434e5e76263SJacob Xu { 435e5e76263SJacob Xu extern char cross_movups_cont; 436e5e76263SJacob Xu 437e5e76263SJacob Xu ++exceptions; 438e5e76263SJacob Xu regs->rip = (ulong)&cross_movups_cont; 439e5e76263SJacob Xu } 440e5e76263SJacob Xu 441e5e76263SJacob Xu static __attribute__((target("sse2"))) void test_sse_exceptions(void *cross_mem) 442e5e76263SJacob Xu { 44393a3ae40SJacob Xu sse128 vv; 44493a3ae40SJacob Xu uint32_t *v = (uint32_t *)&vv; 44593a3ae40SJacob Xu uint32_t *mem; 446e5e76263SJacob Xu uint8_t *bytes = cross_mem; // aligned on PAGE_SIZE*2 447e5e76263SJacob Xu void *page2 = (void *)(&bytes[4096]); 448e5e76263SJacob Xu struct pte_search search; 449e5e76263SJacob Xu pteval_t orig_pte; 45015bfae71SMichal Luczaj handler old; 451e5e76263SJacob Xu 452e5e76263SJacob Xu // setup memory for unaligned access 45393a3ae40SJacob Xu mem = (uint32_t *)(&bytes[8]); 454e5e76263SJacob Xu 455e5e76263SJacob Xu // test unaligned access for movups, movupd and movaps 45693a3ae40SJacob Xu v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; 45793a3ae40SJacob Xu mem[0] = 5; mem[1] = 6; mem[2] = 8; mem[3] = 9; 45893a3ae40SJacob Xu asm("movups %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); 45993a3ae40SJacob Xu report(sseeq(v, mem), "movups unaligned"); 460e5e76263SJacob Xu 46193a3ae40SJacob Xu v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; 46293a3ae40SJacob Xu mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8; 46393a3ae40SJacob Xu asm("movupd %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); 46493a3ae40SJacob Xu report(sseeq(v, mem), "movupd unaligned"); 465e5e76263SJacob Xu exceptions = 0; 46615bfae71SMichal Luczaj old = handle_exception(GP_VECTOR, unaligned_movaps_handler); 467e5e76263SJacob Xu asm("movaps %1, %0\n\t unaligned_movaps_cont:" 46893a3ae40SJacob Xu : "=m"(*mem) : "x"(vv)); 46915bfae71SMichal Luczaj handle_exception(GP_VECTOR, old); 470e5e76263SJacob Xu report(exceptions == 1, "unaligned movaps exception"); 471e5e76263SJacob Xu 472e5e76263SJacob Xu // setup memory for cross page access 47393a3ae40SJacob Xu mem = (uint32_t *)(&bytes[4096-8]); 47493a3ae40SJacob Xu v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; 47593a3ae40SJacob Xu mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8; 476e5e76263SJacob Xu 47793a3ae40SJacob Xu asm("movups %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); 47893a3ae40SJacob Xu report(sseeq(v, mem), "movups unaligned crosspage"); 479e5e76263SJacob Xu 480e5e76263SJacob Xu // invalidate second page 481e5e76263SJacob Xu search = find_pte_level(current_page_table(), page2, 1); 482e5e76263SJacob Xu orig_pte = *search.pte; 483e5e76263SJacob Xu install_pte(current_page_table(), 1, page2, 0, NULL); 484e5e76263SJacob Xu invlpg(page2); 485e5e76263SJacob Xu 486e5e76263SJacob Xu exceptions = 0; 48715bfae71SMichal Luczaj old = handle_exception(PF_VECTOR, cross_movups_handler); 48893a3ae40SJacob Xu asm("movups %1, %0\n\t cross_movups_cont:" : "=m"(*mem) : "x"(vv) : 48993a3ae40SJacob Xu "memory"); 49015bfae71SMichal Luczaj handle_exception(PF_VECTOR, old); 491e5e76263SJacob Xu report(exceptions == 1, "movups crosspage exception"); 492e5e76263SJacob Xu 493e5e76263SJacob Xu // restore invalidated page 494e5e76263SJacob Xu install_pte(current_page_table(), 1, page2, orig_pte, NULL); 495e5e76263SJacob Xu } 496e5e76263SJacob Xu 497b212fcdaSAvi Kivity static void test_shld_shrd(u32 *mem) 498b212fcdaSAvi Kivity { 499b212fcdaSAvi Kivity *mem = 0x12345678; 500b212fcdaSAvi Kivity asm("shld %2, %1, %0" : "+m"(*mem) : "r"(0xaaaaaaaaU), "c"((u8)3)); 501a299895bSThomas Huth report(*mem == ((0x12345678 << 3) | 5), "shld (cl)"); 502b212fcdaSAvi Kivity *mem = 0x12345678; 503b212fcdaSAvi Kivity asm("shrd %2, %1, %0" : "+m"(*mem) : "r"(0x55555555U), "c"((u8)3)); 504a299895bSThomas Huth report(*mem == ((0x12345678 >> 3) | (5u << 29)), "shrd (cl)"); 505b212fcdaSAvi Kivity } 506b212fcdaSAvi Kivity 50745fdc228SPaolo Bonzini static void test_smsw_reg(uint64_t *mem) 508313f4efeSNadav Amit { 509313f4efeSNadav Amit unsigned long cr0 = read_cr0(); 51045fdc228SPaolo Bonzini unsigned long rax; 511bbdb7433SSean Christopherson const unsigned long in_rax = 0x1234567890abcdefull & -1ul; 512313f4efeSNadav Amit 51345fdc228SPaolo Bonzini asm(KVM_FEP "smsww %w0\n\t" : "=a" (rax) : "0" (in_rax)); 514a299895bSThomas Huth report((u16)rax == (u16)cr0 && rax >> 16 == in_rax >> 16, 515a299895bSThomas Huth "16-bit smsw reg"); 516313f4efeSNadav Amit 51745fdc228SPaolo Bonzini asm(KVM_FEP "smswl %k0\n\t" : "=a" (rax) : "0" (in_rax)); 518a299895bSThomas Huth report(rax == (u32)cr0, "32-bit smsw reg"); 519313f4efeSNadav Amit 520bbdb7433SSean Christopherson #ifdef __x86_64__ 5212f394044SBill Wendling asm(KVM_FEP "smswq %q0\n\t" : "=a" (rax) : "0" (in_rax)); 522a299895bSThomas Huth report(rax == cr0, "64-bit smsw reg"); 523bbdb7433SSean Christopherson #endif 524313f4efeSNadav Amit } 525313f4efeSNadav Amit 52645fdc228SPaolo Bonzini static void test_nop(uint64_t *mem) 527ae399010SNadav Amit { 52845fdc228SPaolo Bonzini unsigned long rax; 529bbdb7433SSean Christopherson const unsigned long in_rax = 0x12345678ul; 53045fdc228SPaolo Bonzini asm(KVM_FEP "nop\n\t" : "=a" (rax) : "0" (in_rax)); 531a299895bSThomas Huth report(rax == in_rax, "nop"); 532ae399010SNadav Amit } 533ae399010SNadav Amit 53445fdc228SPaolo Bonzini static void test_mov_dr(uint64_t *mem) 535b39a3e14SNadav Amit { 53645fdc228SPaolo Bonzini unsigned long rax; 537f1dcfd54SSean Christopherson 538bbdb7433SSean Christopherson asm(KVM_FEP "mov %0, %%dr6\n\t" 539bbdb7433SSean Christopherson KVM_FEP "mov %%dr6, %0\n\t" : "=a" (rax) : "a" (0)); 540f1dcfd54SSean Christopherson 541f1dcfd54SSean Christopherson if (this_cpu_has(X86_FEATURE_RTM)) 542f1dcfd54SSean Christopherson report(rax == (DR6_ACTIVE_LOW & ~DR6_RTM), "mov_dr6"); 543f1dcfd54SSean Christopherson else 544f1dcfd54SSean Christopherson report(rax == DR6_ACTIVE_LOW, "mov_dr6"); 545b39a3e14SNadav Amit } 546b39a3e14SNadav Amit 5470dcb3fbaSMichal Luczaj static void test_illegal_lea(void) 5480dcb3fbaSMichal Luczaj { 5490dcb3fbaSMichal Luczaj unsigned int vector; 5500dcb3fbaSMichal Luczaj 5510dcb3fbaSMichal Luczaj asm volatile (ASM_TRY_FEP("1f") 5520dcb3fbaSMichal Luczaj ".byte 0x8d; .byte 0xc0\n\t" 5530dcb3fbaSMichal Luczaj "1:" 5540dcb3fbaSMichal Luczaj : : : "memory", "eax"); 5550dcb3fbaSMichal Luczaj 5560dcb3fbaSMichal Luczaj vector = exception_vector(); 5570dcb3fbaSMichal Luczaj report(vector == UD_VECTOR, 5580dcb3fbaSMichal Luczaj "Wanted #UD on LEA with /reg, got vector = %u", vector); 5590dcb3fbaSMichal Luczaj } 5600dcb3fbaSMichal Luczaj 561ec278ce3SAvi Kivity static void test_crosspage_mmio(volatile uint8_t *mem) 562ec278ce3SAvi Kivity { 563ec278ce3SAvi Kivity volatile uint16_t w, *pw; 564ec278ce3SAvi Kivity 565ec278ce3SAvi Kivity pw = (volatile uint16_t *)&mem[4095]; 566ec278ce3SAvi Kivity mem[4095] = 0x99; 567ec278ce3SAvi Kivity mem[4096] = 0x77; 568ec278ce3SAvi Kivity asm volatile("mov %1, %0" : "=r"(w) : "m"(*pw) : "memory"); 569a299895bSThomas Huth report(w == 0x7799, "cross-page mmio read"); 570ec278ce3SAvi Kivity asm volatile("mov %1, %0" : "=m"(*pw) : "r"((uint16_t)0x88aa)); 571a299895bSThomas Huth report(mem[4095] == 0xaa && mem[4096] == 0x88, "cross-page mmio write"); 572ec278ce3SAvi Kivity } 573ec278ce3SAvi Kivity 574a19c7db7SXiao Guangrong static void test_string_io_mmio(volatile uint8_t *mem) 575a19c7db7SXiao Guangrong { 576a19c7db7SXiao Guangrong /* Cross MMIO pages.*/ 577a19c7db7SXiao Guangrong volatile uint8_t *mmio = mem + 4032; 578a19c7db7SXiao Guangrong 579a19c7db7SXiao Guangrong asm volatile("outw %%ax, %%dx \n\t" : : "a"(0x9999), "d"(TESTDEV_IO_PORT)); 580a19c7db7SXiao Guangrong 581a19c7db7SXiao Guangrong asm volatile ("cld; rep insb" : : "d" (TESTDEV_IO_PORT), "D" (mmio), "c" (1024)); 582a19c7db7SXiao Guangrong 583a299895bSThomas Huth report(mmio[1023] == 0x99, "string_io_mmio"); 584a19c7db7SXiao Guangrong } 585a19c7db7SXiao Guangrong 58656c6afa7SJan Kiszka /* kvm doesn't allow lidt/lgdt from mmio, so the test is disabled */ 58756c6afa7SJan Kiszka #if 0 58847c1461aSAvi Kivity static void test_lgdt_lidt(volatile uint8_t *mem) 58947c1461aSAvi Kivity { 59047c1461aSAvi Kivity struct descriptor_table_ptr orig, fresh = {}; 59147c1461aSAvi Kivity 59247c1461aSAvi Kivity sgdt(&orig); 59347c1461aSAvi Kivity *(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) { 59447c1461aSAvi Kivity .limit = 0xf234, 59547c1461aSAvi Kivity .base = 0x12345678abcd, 59647c1461aSAvi Kivity }; 59747c1461aSAvi Kivity cli(); 59847c1461aSAvi Kivity asm volatile("lgdt %0" : : "m"(*(struct descriptor_table_ptr *)mem)); 59947c1461aSAvi Kivity sgdt(&fresh); 60047c1461aSAvi Kivity lgdt(&orig); 60147c1461aSAvi Kivity sti(); 602*f485d340SSean Christopherson report(orig.limit == fresh.limit && orig.base == fresh.base, "lgdt (long address)"); 60347c1461aSAvi Kivity 60447c1461aSAvi Kivity sidt(&orig); 60547c1461aSAvi Kivity *(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) { 60647c1461aSAvi Kivity .limit = 0x432f, 60747c1461aSAvi Kivity .base = 0xdbca87654321, 60847c1461aSAvi Kivity }; 60947c1461aSAvi Kivity cli(); 61047c1461aSAvi Kivity asm volatile("lidt %0" : : "m"(*(struct descriptor_table_ptr *)mem)); 61147c1461aSAvi Kivity sidt(&fresh); 61247c1461aSAvi Kivity lidt(&orig); 61347c1461aSAvi Kivity sti(); 614*f485d340SSean Christopherson report(orig.limit == fresh.limit && orig.base == fresh.base, "lidt (long address)"); 61547c1461aSAvi Kivity } 61656c6afa7SJan Kiszka #endif 61747c1461aSAvi Kivity 6184425dba6SPeter Feiner /* Broken emulation causes triple fault, which skips the other tests. */ 6194425dba6SPeter Feiner #if 0 620cb615a4dSAvi Kivity static void test_lldt(volatile uint16_t *mem) 621cb615a4dSAvi Kivity { 6224425dba6SPeter Feiner u64 gdt[] = { 0, /* null descriptor */ 6234425dba6SPeter Feiner #ifdef __X86_64__ 6244425dba6SPeter Feiner 0, /* ldt descriptor is 16 bytes in long mode */ 6254425dba6SPeter Feiner #endif 626*f485d340SSean Christopherson 0x0000f82000000ffffull /* ldt descriptor */ 627*f485d340SSean Christopherson }; 6284425dba6SPeter Feiner struct descriptor_table_ptr gdt_ptr = { .limit = sizeof(gdt) - 1, 6294425dba6SPeter Feiner .base = (ulong)&gdt }; 630cb615a4dSAvi Kivity struct descriptor_table_ptr orig_gdt; 631cb615a4dSAvi Kivity 632cb615a4dSAvi Kivity cli(); 633cb615a4dSAvi Kivity sgdt(&orig_gdt); 634cb615a4dSAvi Kivity lgdt(&gdt_ptr); 635cb615a4dSAvi Kivity *mem = 0x8; 636cb615a4dSAvi Kivity asm volatile("lldt %0" : : "m"(*mem)); 637cb615a4dSAvi Kivity lgdt(&orig_gdt); 638cb615a4dSAvi Kivity sti(); 639a299895bSThomas Huth report(sldt() == *mem, "lldt"); 640cb615a4dSAvi Kivity } 6414425dba6SPeter Feiner #endif 642cb615a4dSAvi Kivity 64358a9d81eSAvi Kivity static void test_ltr(volatile uint16_t *mem) 64458a9d81eSAvi Kivity { 64558a9d81eSAvi Kivity struct descriptor_table_ptr gdt_ptr; 64658a9d81eSAvi Kivity uint64_t *gdt, *trp; 64758a9d81eSAvi Kivity uint16_t tr = str(); 64858a9d81eSAvi Kivity uint64_t busy_mask = (uint64_t)1 << 41; 64958a9d81eSAvi Kivity 65058a9d81eSAvi Kivity sgdt(&gdt_ptr); 65158a9d81eSAvi Kivity gdt = (uint64_t *)gdt_ptr.base; 65258a9d81eSAvi Kivity trp = &gdt[tr >> 3]; 65358a9d81eSAvi Kivity *trp &= ~busy_mask; 65458a9d81eSAvi Kivity *mem = tr; 65558a9d81eSAvi Kivity asm volatile("ltr %0" : : "m"(*mem) : "memory"); 656a299895bSThomas Huth report(str() == tr && (*trp & busy_mask), "ltr"); 65758a9d81eSAvi Kivity } 65858a9d81eSAvi Kivity 659215ad64cSSean Christopherson static void test_mov(void *mem) 660215ad64cSSean Christopherson { 661215ad64cSSean Christopherson unsigned long t1, t2; 662215ad64cSSean Christopherson 663215ad64cSSean Christopherson // test mov reg, r/m and mov r/m, reg 664bbdb7433SSean Christopherson t1 = 0x123456789abcdefull & -1ul; 665215ad64cSSean Christopherson asm volatile("mov %[t1], (%[mem]) \n\t" 666215ad64cSSean Christopherson "mov (%[mem]), %[t2]" 667215ad64cSSean Christopherson : [t2]"=r"(t2) 668215ad64cSSean Christopherson : [t1]"r"(t1), [mem]"r"(mem) 669215ad64cSSean Christopherson : "memory"); 670bbdb7433SSean Christopherson report(t2 == (0x123456789abcdefull & -1ul), "mov reg, r/m (1)"); 671215ad64cSSean Christopherson } 672215ad64cSSean Christopherson 6736cff92ddSAvi Kivity static void test_simplealu(u32 *mem) 6746cff92ddSAvi Kivity { 6756cff92ddSAvi Kivity *mem = 0x1234; 6766cff92ddSAvi Kivity asm("or %1, %0" : "+m"(*mem) : "r"(0x8001)); 677a299895bSThomas Huth report(*mem == 0x9235, "or"); 6786cff92ddSAvi Kivity asm("add %1, %0" : "+m"(*mem) : "r"(2)); 679a299895bSThomas Huth report(*mem == 0x9237, "add"); 6806cff92ddSAvi Kivity asm("xor %1, %0" : "+m"(*mem) : "r"(0x1111)); 681a299895bSThomas Huth report(*mem == 0x8326, "xor"); 6826cff92ddSAvi Kivity asm("sub %1, %0" : "+m"(*mem) : "r"(0x26)); 683a299895bSThomas Huth report(*mem == 0x8300, "sub"); 6846cff92ddSAvi Kivity asm("clc; adc %1, %0" : "+m"(*mem) : "r"(0x100)); 685a299895bSThomas Huth report(*mem == 0x8400, "adc(0)"); 6866cff92ddSAvi Kivity asm("stc; adc %1, %0" : "+m"(*mem) : "r"(0x100)); 687a299895bSThomas Huth report(*mem == 0x8501, "adc(0)"); 6886cff92ddSAvi Kivity asm("clc; sbb %1, %0" : "+m"(*mem) : "r"(0)); 689a299895bSThomas Huth report(*mem == 0x8501, "sbb(0)"); 6906cff92ddSAvi Kivity asm("stc; sbb %1, %0" : "+m"(*mem) : "r"(0)); 691a299895bSThomas Huth report(*mem == 0x8500, "sbb(1)"); 6926cff92ddSAvi Kivity asm("and %1, %0" : "+m"(*mem) : "r"(0xfe77)); 693a299895bSThomas Huth report(*mem == 0x8400, "and"); 6946cff92ddSAvi Kivity asm("test %1, %0" : "+m"(*mem) : "r"(0xf000)); 695a299895bSThomas Huth report(*mem == 0x8400, "test"); 6966cff92ddSAvi Kivity } 6976cff92ddSAvi Kivity 69870bdcadbSNadav Amit static void test_illegal_movbe(void) 69970bdcadbSNadav Amit { 7003af47210SMichal Luczaj unsigned int vector; 7013af47210SMichal Luczaj 702badc98caSKrish Sadhukhan if (!this_cpu_has(X86_FEATURE_MOVBE)) { 7033af47210SMichal Luczaj report_skip("MOVBE unsupported by CPU"); 70470bdcadbSNadav Amit return; 70570bdcadbSNadav Amit } 70670bdcadbSNadav Amit 7073af47210SMichal Luczaj asm volatile(ASM_TRY("1f") 7083af47210SMichal Luczaj ".byte 0x0f; .byte 0x38; .byte 0xf0; .byte 0xc0;\n\t" 7093af47210SMichal Luczaj "1:" 7103af47210SMichal Luczaj : : : "memory", "rax"); 7113af47210SMichal Luczaj 7123af47210SMichal Luczaj vector = exception_vector(); 7133af47210SMichal Luczaj report(vector == UD_VECTOR, 7143af47210SMichal Luczaj "Wanted #UD on MOVBE with /reg, got vector = %u", vector); 71570bdcadbSNadav Amit } 71670bdcadbSNadav Amit 717c4d38af0SMichal Luczaj #ifdef __x86_64__ 718c4d38af0SMichal Luczaj #define RIP_RELATIVE "(%%rip)" 719c4d38af0SMichal Luczaj #else 720c4d38af0SMichal Luczaj #define RIP_RELATIVE "" 721c4d38af0SMichal Luczaj #endif 722c4d38af0SMichal Luczaj 723c4d38af0SMichal Luczaj static void handle_db(struct ex_regs *regs) 724c4d38af0SMichal Luczaj { 725c4d38af0SMichal Luczaj ++exceptions; 726c4d38af0SMichal Luczaj regs->rflags |= X86_EFLAGS_RF; 727c4d38af0SMichal Luczaj } 728c4d38af0SMichal Luczaj 729c4d38af0SMichal Luczaj static void test_mov_pop_ss_code_db(void) 730c4d38af0SMichal Luczaj { 731c4d38af0SMichal Luczaj handler old_db_handler = handle_exception(DB_VECTOR, handle_db); 732c4d38af0SMichal Luczaj bool fep_available = is_fep_available(); 733c4d38af0SMichal Luczaj /* On Intel, code #DBs are inhibited when MOV/POP SS blocking is active. */ 734c4d38af0SMichal Luczaj int nr_expected = is_intel() ? 0 : 1; 735c4d38af0SMichal Luczaj 736c4d38af0SMichal Luczaj write_dr7(DR7_FIXED_1 | 737c4d38af0SMichal Luczaj DR7_GLOBAL_ENABLE_DRx(0) | 738c4d38af0SMichal Luczaj DR7_EXECUTE_DRx(0) | 739c4d38af0SMichal Luczaj DR7_LEN_1_DRx(0)); 740c4d38af0SMichal Luczaj 741c4d38af0SMichal Luczaj #define MOV_POP_SS_DB(desc, fep1, fep2, insn, store_ss, load_ss) \ 742c4d38af0SMichal Luczaj ({ \ 743c4d38af0SMichal Luczaj unsigned long r; \ 744c4d38af0SMichal Luczaj \ 745c4d38af0SMichal Luczaj exceptions = 0; \ 746c4d38af0SMichal Luczaj asm volatile("lea 1f " RIP_RELATIVE ", %0\n\t" \ 747c4d38af0SMichal Luczaj "mov %0, %%dr0\n\t" \ 748c4d38af0SMichal Luczaj store_ss \ 749c4d38af0SMichal Luczaj fep1 load_ss \ 750c4d38af0SMichal Luczaj fep2 "1: xor %0, %0\n\t" \ 751c4d38af0SMichal Luczaj "2:" \ 752c4d38af0SMichal Luczaj : "=r" (r) \ 753c4d38af0SMichal Luczaj : \ 754c4d38af0SMichal Luczaj : "memory"); \ 755c4d38af0SMichal Luczaj report(exceptions == nr_expected && !r, \ 756c4d38af0SMichal Luczaj desc ": #DB %s after " insn " SS", \ 757c4d38af0SMichal Luczaj nr_expected ? "occurred" : "suppressed"); \ 758c4d38af0SMichal Luczaj }) 759c4d38af0SMichal Luczaj 760c4d38af0SMichal Luczaj #define MOV_SS_DB(desc, fep1, fep2) \ 761c4d38af0SMichal Luczaj MOV_POP_SS_DB(desc, fep1, fep2, "MOV", \ 762c4d38af0SMichal Luczaj "mov %%ss, %0\n\t", "mov %0, %%ss\n\t") 763c4d38af0SMichal Luczaj 764c4d38af0SMichal Luczaj MOV_SS_DB("no fep", "", ""); 765c4d38af0SMichal Luczaj if (fep_available) { 766c4d38af0SMichal Luczaj MOV_SS_DB("fep MOV-SS", KVM_FEP, ""); 767c4d38af0SMichal Luczaj MOV_SS_DB("fep XOR", "", KVM_FEP); 768c4d38af0SMichal Luczaj MOV_SS_DB("fep MOV-SS/fep XOR", KVM_FEP, KVM_FEP); 769c4d38af0SMichal Luczaj } 770c4d38af0SMichal Luczaj 771c4d38af0SMichal Luczaj /* PUSH/POP SS are invalid in 64-bit mode. */ 772c4d38af0SMichal Luczaj #ifndef __x86_64__ 773c4d38af0SMichal Luczaj #define POP_SS_DB(desc, fep1, fep2) \ 774c4d38af0SMichal Luczaj MOV_POP_SS_DB(desc, fep1, fep2, "POP", \ 775c4d38af0SMichal Luczaj "push %%ss\n\t", "pop %%ss\n\t") 776c4d38af0SMichal Luczaj 777c4d38af0SMichal Luczaj POP_SS_DB("no fep", "", ""); 778c4d38af0SMichal Luczaj if (fep_available) { 779c4d38af0SMichal Luczaj POP_SS_DB("fep POP-SS", KVM_FEP, ""); 780c4d38af0SMichal Luczaj POP_SS_DB("fep XOR", "", KVM_FEP); 781c4d38af0SMichal Luczaj POP_SS_DB("fep POP-SS/fep XOR", KVM_FEP, KVM_FEP); 782c4d38af0SMichal Luczaj } 783c4d38af0SMichal Luczaj #endif 784c4d38af0SMichal Luczaj 785c4d38af0SMichal Luczaj write_dr7(DR7_FIXED_1); 786c4d38af0SMichal Luczaj 787c4d38af0SMichal Luczaj handle_exception(DB_VECTOR, old_db_handler); 788c4d38af0SMichal Luczaj } 789c4d38af0SMichal Luczaj 7907db17e21SThomas Huth int main(void) 7917d36db35SAvi Kivity { 7927d36db35SAvi Kivity void *mem; 793e5e76263SJacob Xu void *cross_mem; 7947d36db35SAvi Kivity 795bbdb7433SSean Christopherson if (!is_fep_available()) 796bbdb7433SSean Christopherson report_skip("Skipping tests the require forced emulation, " 797bbdb7433SSean Christopherson "use kvm.force_emulation_prefix=1 to enable"); 798bbdb7433SSean Christopherson 7997d36db35SAvi Kivity setup_vm(); 80045fdc228SPaolo Bonzini 801ec278ce3SAvi Kivity mem = alloc_vpages(2); 802ec278ce3SAvi Kivity install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem); 803ec278ce3SAvi Kivity // install the page twice to test cross-page mmio 804ec278ce3SAvi Kivity install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem + 4096); 805e5e76263SJacob Xu cross_mem = vmap(virt_to_phys(alloc_pages(2)), 2 * PAGE_SIZE); 8067d36db35SAvi Kivity 807215ad64cSSean Christopherson test_mov(mem); 8086cff92ddSAvi Kivity test_simplealu(mem); 8097d36db35SAvi Kivity test_cmps(mem); 81080a4ea7bSAvi Kivity test_scas(mem); 8114003963dSNadav Amit test_smsw(mem); 8127d36db35SAvi Kivity test_lmsw(); 8137d36db35SAvi Kivity test_stringio(); 8147d36db35SAvi Kivity test_incdecnotneg(mem); 815d4655eafSWei Yongjun test_btc(mem); 8162e16c7f6SWei Yongjun test_bsfbsr(mem); 81751d65a3cSAvi Kivity test_imul(mem); 818d7f3ee3cSAvi Kivity test_sse(mem); 819e5e76263SJacob Xu test_sse_exceptions(cross_mem); 820b212fcdaSAvi Kivity test_shld_shrd(mem); 82147c1461aSAvi Kivity //test_lgdt_lidt(mem); 8224425dba6SPeter Feiner //test_lldt(mem); 82358a9d81eSAvi Kivity test_ltr(mem); 8247d36db35SAvi Kivity 8257948d4b6SSean Christopherson if (is_fep_available()) { 82645fdc228SPaolo Bonzini test_smsw_reg(mem); 82745fdc228SPaolo Bonzini test_nop(mem); 82845fdc228SPaolo Bonzini test_mov_dr(mem); 8290dcb3fbaSMichal Luczaj test_illegal_lea(); 83045fdc228SPaolo Bonzini } 83145fdc228SPaolo Bonzini 832ec278ce3SAvi Kivity test_crosspage_mmio(mem); 833ec278ce3SAvi Kivity 834a19c7db7SXiao Guangrong test_string_io_mmio(mem); 83570bdcadbSNadav Amit test_illegal_movbe(); 836c4d38af0SMichal Luczaj test_mov_pop_ss_code_db(); 837f413c1afSNadav Amit 838bbdb7433SSean Christopherson #ifdef __x86_64__ 839bbdb7433SSean Christopherson test_emulator_64(mem); 840bbdb7433SSean Christopherson #endif 841f3cdd159SJan Kiszka return report_summary(); 8427d36db35SAvi Kivity } 843