1 #include <asm/debugreg.h> 2 3 #include "ioram.h" 4 #include "vm.h" 5 #include "libcflat.h" 6 #include "desc.h" 7 #include "types.h" 8 #include "processor.h" 9 #include "vmalloc.h" 10 #include "alloc_page.h" 11 #include "usermode.h" 12 13 #define TESTDEV_IO_PORT 0xe0 14 15 #define MAGIC_NUM 0xdeadbeefdeadbeefUL 16 #define GS_BASE 0x400000 17 18 static int exceptions; 19 20 /* Forced emulation prefix, used to invoke the emulator unconditionally. */ 21 #define KVM_FEP "ud2; .byte 'k', 'v', 'm';" 22 #define KVM_FEP_LENGTH 5 23 static int fep_available = 1; 24 25 struct regs { 26 u64 rax, rbx, rcx, rdx; 27 u64 rsi, rdi, rsp, rbp; 28 u64 r8, r9, r10, r11; 29 u64 r12, r13, r14, r15; 30 u64 rip, rflags; 31 }; 32 struct regs inregs, outregs, save; 33 34 struct insn_desc { 35 u64 ptr; 36 size_t len; 37 }; 38 39 static char st1[] = "abcdefghijklmnop"; 40 41 static void test_stringio(void) 42 { 43 unsigned char r = 0; 44 asm volatile("cld \n\t" 45 "movw %0, %%dx \n\t" 46 "rep outsb \n\t" 47 : : "i"((short)TESTDEV_IO_PORT), 48 "S"(st1), "c"(sizeof(st1) - 1)); 49 asm volatile("inb %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT)); 50 report(r == st1[sizeof(st1) - 2], "outsb up"); /* last char */ 51 52 asm volatile("std \n\t" 53 "movw %0, %%dx \n\t" 54 "rep outsb \n\t" 55 : : "i"((short)TESTDEV_IO_PORT), 56 "S"(st1 + sizeof(st1) - 2), "c"(sizeof(st1) - 1)); 57 asm volatile("cld \n\t" : : ); 58 asm volatile("in %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT)); 59 report(r == st1[0], "outsb down"); 60 } 61 62 static void test_cmps_one(unsigned char *m1, unsigned char *m3) 63 { 64 void *rsi, *rdi; 65 long rcx, tmp; 66 67 rsi = m1; rdi = m3; rcx = 30; 68 asm volatile("xor %[tmp], %[tmp] \n\t" 69 "repe cmpsb" 70 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 71 : : "cc"); 72 report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe/cmpsb (1)"); 73 74 rsi = m1; rdi = m3; rcx = 30; 75 asm volatile("or $1, %[tmp]\n\t" // clear ZF 76 "repe cmpsb" 77 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 78 : : "cc"); 79 report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, 80 "repe cmpsb (1.zf)"); 81 82 rsi = m1; rdi = m3; rcx = 15; 83 asm volatile("xor %[tmp], %[tmp] \n\t" 84 "repe cmpsw" 85 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 86 : : "cc"); 87 report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe cmpsw (1)"); 88 89 rsi = m1; rdi = m3; rcx = 7; 90 asm volatile("xor %[tmp], %[tmp] \n\t" 91 "repe cmpsl" 92 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 93 : : "cc"); 94 report(rcx == 0 && rsi == m1 + 28 && rdi == m3 + 28, "repe cmpll (1)"); 95 96 rsi = m1; rdi = m3; rcx = 4; 97 asm volatile("xor %[tmp], %[tmp] \n\t" 98 "repe cmpsq" 99 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 100 : : "cc"); 101 report(rcx == 0 && rsi == m1 + 32 && rdi == m3 + 32, "repe cmpsq (1)"); 102 103 rsi = m1; rdi = m3; rcx = 130; 104 asm volatile("xor %[tmp], %[tmp] \n\t" 105 "repe cmpsb" 106 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 107 : : "cc"); 108 report(rcx == 29 && rsi == m1 + 101 && rdi == m3 + 101, 109 "repe cmpsb (2)"); 110 111 rsi = m1; rdi = m3; rcx = 65; 112 asm volatile("xor %[tmp], %[tmp] \n\t" 113 "repe cmpsw" 114 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 115 : : "cc"); 116 report(rcx == 14 && rsi == m1 + 102 && rdi == m3 + 102, 117 "repe cmpsw (2)"); 118 119 rsi = m1; rdi = m3; rcx = 32; 120 asm volatile("xor %[tmp], %[tmp] \n\t" 121 "repe cmpsl" 122 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 123 : : "cc"); 124 report(rcx == 6 && rsi == m1 + 104 && rdi == m3 + 104, 125 "repe cmpll (2)"); 126 127 rsi = m1; rdi = m3; rcx = 16; 128 asm volatile("xor %[tmp], %[tmp] \n\t" 129 "repe cmpsq" 130 : "+S"(rsi), "+D"(rdi), "+c"(rcx), [tmp]"=&r"(tmp) 131 : : "cc"); 132 report(rcx == 3 && rsi == m1 + 104 && rdi == m3 + 104, 133 "repe cmpsq (2)"); 134 135 } 136 137 static void test_cmps(void *mem) 138 { 139 unsigned char *m1 = mem, *m2 = mem + 1024; 140 unsigned char m3[1024]; 141 142 for (int i = 0; i < 100; ++i) 143 m1[i] = m2[i] = m3[i] = i; 144 for (int i = 100; i < 200; ++i) 145 m1[i] = (m3[i] = m2[i] = i) + 1; 146 test_cmps_one(m1, m3); 147 test_cmps_one(m1, m2); 148 } 149 150 static void test_scas(void *mem) 151 { 152 bool z; 153 void *di; 154 155 *(ulong *)mem = 0x77665544332211; 156 157 di = mem; 158 asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff11)); 159 report(di == mem + 1 && z, "scasb match"); 160 161 di = mem; 162 asm ("scasb; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff54)); 163 report(di == mem + 1 && !z, "scasb mismatch"); 164 165 di = mem; 166 asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff2211)); 167 report(di == mem + 2 && z, "scasw match"); 168 169 di = mem; 170 asm ("scasw; setz %0" : "=rm"(z), "+D"(di) : "a"(0xffdd11)); 171 report(di == mem + 2 && !z, "scasw mismatch"); 172 173 di = mem; 174 asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"(0xff44332211ul)); 175 report(di == mem + 4 && z, "scasd match"); 176 177 di = mem; 178 asm ("scasl; setz %0" : "=rm"(z), "+D"(di) : "a"(0x45332211)); 179 report(di == mem + 4 && !z, "scasd mismatch"); 180 181 di = mem; 182 asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(0x77665544332211ul)); 183 report(di == mem + 8 && z, "scasq match"); 184 185 di = mem; 186 asm ("scasq; setz %0" : "=rm"(z), "+D"(di) : "a"(3)); 187 report(di == mem + 8 && !z, "scasq mismatch"); 188 } 189 190 static void test_cr8(void) 191 { 192 unsigned long src, dst; 193 194 dst = 777; 195 src = 3; 196 asm volatile("mov %[src], %%cr8; mov %%cr8, %[dst]" 197 : [dst]"+r"(dst), [src]"+r"(src)); 198 report(dst == 3 && src == 3, "mov %%cr8"); 199 } 200 201 static void test_push(void *mem) 202 { 203 unsigned long tmp; 204 unsigned long *stack_top = mem + 4096; 205 unsigned long *new_stack_top; 206 unsigned long memw = 0x123456789abcdeful; 207 208 memset(mem, 0x55, (void *)stack_top - mem); 209 210 asm volatile("mov %%rsp, %[tmp] \n\t" 211 "mov %[stack_top], %%rsp \n\t" 212 "pushq $-7 \n\t" 213 "pushq %[reg] \n\t" 214 "pushq (%[mem]) \n\t" 215 "pushq $-7070707 \n\t" 216 "mov %%rsp, %[new_stack_top] \n\t" 217 "mov %[tmp], %%rsp" 218 : [tmp]"=&r"(tmp), [new_stack_top]"=r"(new_stack_top) 219 : [stack_top]"r"(stack_top), 220 [reg]"r"(-17l), [mem]"r"(&memw) 221 : "memory"); 222 223 report(stack_top[-1] == -7ul, "push $imm8"); 224 report(stack_top[-2] == -17ul, "push %%reg"); 225 report(stack_top[-3] == 0x123456789abcdeful, "push mem"); 226 report(stack_top[-4] == -7070707, "push $imm"); 227 } 228 229 static void test_pop(void *mem) 230 { 231 unsigned long tmp, tmp3, rsp, rbp; 232 unsigned long *stack_top = mem + 4096; 233 unsigned long memw = 0x123456789abcdeful; 234 static unsigned long tmp2; 235 236 memset(mem, 0x55, (void *)stack_top - mem); 237 238 asm volatile("pushq %[val] \n\t" 239 "popq (%[mem])" 240 : : [val]"m"(memw), [mem]"r"(mem) : "memory"); 241 report(*(unsigned long *)mem == memw, "pop mem"); 242 243 memw = 7 - memw; 244 asm volatile("mov %%rsp, %[tmp] \n\t" 245 "mov %[stack_top], %%rsp \n\t" 246 "pushq %[val] \n\t" 247 "popq %[tmp2] \n\t" 248 "mov %[tmp], %%rsp" 249 : [tmp]"=&r"(tmp), [tmp2]"=m"(tmp2) 250 : [val]"r"(memw), [stack_top]"r"(stack_top) 251 : "memory"); 252 report(tmp2 == memw, "pop mem (2)"); 253 254 memw = 129443 - memw; 255 asm volatile("mov %%rsp, %[tmp] \n\t" 256 "mov %[stack_top], %%rsp \n\t" 257 "pushq %[val] \n\t" 258 "popq %[tmp2] \n\t" 259 "mov %[tmp], %%rsp" 260 : [tmp]"=&r"(tmp), [tmp2]"=r"(tmp2) 261 : [val]"r"(memw), [stack_top]"r"(stack_top) 262 : "memory"); 263 report(tmp2 == memw, "pop reg"); 264 265 asm volatile("mov %%rsp, %[tmp] \n\t" 266 "mov %[stack_top], %%rsp \n\t" 267 "lea 1f(%%rip), %%rax \n\t" 268 "push %%rax \n\t" 269 "ret \n\t" 270 "2: jmp 2b \n\t" 271 "1: mov %[tmp], %%rsp" 272 : [tmp]"=&r"(tmp) : [stack_top]"r"(stack_top) 273 : "memory", "rax"); 274 report_pass("ret"); 275 276 stack_top[-1] = 0x778899; 277 asm volatile("mov %[stack_top], %%r8 \n\t" 278 "mov %%rsp, %%r9 \n\t" 279 "xchg %%rbp, %%r8 \n\t" 280 "leave \n\t" 281 "xchg %%rsp, %%r9 \n\t" 282 "xchg %%rbp, %%r8 \n\t" 283 "mov %%r9, %[tmp] \n\t" 284 "mov %%r8, %[tmp3]" 285 : [tmp]"=&r"(tmp), [tmp3]"=&r"(tmp3) : [stack_top]"r"(stack_top-1) 286 : "memory", "r8", "r9"); 287 report(tmp == (ulong)stack_top && tmp3 == 0x778899, "leave"); 288 289 rbp = 0xaa55aa55bb66bb66ULL; 290 rsp = (unsigned long)stack_top; 291 asm volatile("mov %[rsp], %%r8 \n\t" 292 "mov %[rbp], %%r9 \n\t" 293 "xchg %%rsp, %%r8 \n\t" 294 "xchg %%rbp, %%r9 \n\t" 295 "enter $0x1238, $0 \n\t" 296 "xchg %%rsp, %%r8 \n\t" 297 "xchg %%rbp, %%r9 \n\t" 298 "xchg %%r8, %[rsp] \n\t" 299 "xchg %%r9, %[rbp]" 300 : [rsp]"+a"(rsp), [rbp]"+b"(rbp) : : "memory", "r8", "r9"); 301 report(rsp == (unsigned long)stack_top - 8 - 0x1238 302 && rbp == (unsigned long)stack_top - 8 303 && stack_top[-1] == 0xaa55aa55bb66bb66ULL, 304 "enter"); 305 } 306 307 static void test_ljmp(void *mem) 308 { 309 unsigned char *m = mem; 310 volatile int res = 1; 311 312 *(unsigned long**)m = &&jmpf; 313 asm volatile ("data16 mov %%cs, %0":"=m"(*(m + sizeof(unsigned long)))); 314 asm volatile ("rex64 ljmp *%0"::"m"(*m)); 315 res = 0; 316 jmpf: 317 report(res, "ljmp"); 318 } 319 320 static void test_incdecnotneg(void *mem) 321 { 322 unsigned long *m = mem, v = 1234; 323 unsigned char *mb = mem, vb = 66; 324 325 *m = 0; 326 327 asm volatile ("incl %0":"+m"(*m)); 328 report(*m == 1, "incl"); 329 asm volatile ("decl %0":"+m"(*m)); 330 report(*m == 0, "decl"); 331 asm volatile ("incb %0":"+m"(*m)); 332 report(*m == 1, "incb"); 333 asm volatile ("decb %0":"+m"(*m)); 334 report(*m == 0, "decb"); 335 336 asm volatile ("lock incl %0":"+m"(*m)); 337 report(*m == 1, "lock incl"); 338 asm volatile ("lock decl %0":"+m"(*m)); 339 report(*m == 0, "lock decl"); 340 asm volatile ("lock incb %0":"+m"(*m)); 341 report(*m == 1, "lock incb"); 342 asm volatile ("lock decb %0":"+m"(*m)); 343 report(*m == 0, "lock decb"); 344 345 *m = v; 346 347 asm ("lock negq %0" : "+m"(*m)); v = -v; 348 report(*m == v, "lock negl"); 349 asm ("lock notq %0" : "+m"(*m)); v = ~v; 350 report(*m == v, "lock notl"); 351 352 *mb = vb; 353 354 asm ("lock negb %0" : "+m"(*mb)); vb = -vb; 355 report(*mb == vb, "lock negb"); 356 asm ("lock notb %0" : "+m"(*mb)); vb = ~vb; 357 report(*mb == vb, "lock notb"); 358 } 359 360 static void test_smsw(uint64_t *h_mem) 361 { 362 char mem[16]; 363 unsigned short msw, msw_orig, *pmsw; 364 int i, zero; 365 366 msw_orig = read_cr0(); 367 368 asm("smsw %0" : "=r"(msw)); 369 report(msw == msw_orig, "smsw (1)"); 370 371 memset(mem, 0, 16); 372 pmsw = (void *)mem; 373 asm("smsw %0" : "=m"(pmsw[4])); 374 zero = 1; 375 for (i = 0; i < 8; ++i) 376 if (i != 4 && pmsw[i]) 377 zero = 0; 378 report(msw == pmsw[4] && zero, "smsw (2)"); 379 380 /* Trigger exit on smsw */ 381 *h_mem = 0x12345678abcdeful; 382 asm volatile("smsw %0" : "+m"(*h_mem)); 383 report(msw == (unsigned short)*h_mem && 384 (*h_mem & ~0xfffful) == 0x12345678ab0000ul, "smsw (3)"); 385 } 386 387 static void test_lmsw(void) 388 { 389 char mem[16]; 390 unsigned short msw, *pmsw; 391 unsigned long cr0; 392 393 cr0 = read_cr0(); 394 395 msw = cr0 ^ 8; 396 asm("lmsw %0" : : "r"(msw)); 397 printf("before %lx after %lx\n", cr0, read_cr0()); 398 report((cr0 ^ read_cr0()) == 8, "lmsw (1)"); 399 400 pmsw = (void *)mem; 401 *pmsw = cr0; 402 asm("lmsw %0" : : "m"(*pmsw)); 403 printf("before %lx after %lx\n", cr0, read_cr0()); 404 report(cr0 == read_cr0(), "lmsw (2)"); 405 406 /* lmsw can't clear cr0.pe */ 407 msw = (cr0 & ~1ul) ^ 4; /* change EM to force trap */ 408 asm("lmsw %0" : : "r"(msw)); 409 report((cr0 ^ read_cr0()) == 4 && (cr0 & 1), "lmsw (3)"); 410 411 /* back to normal */ 412 msw = cr0; 413 asm("lmsw %0" : : "r"(msw)); 414 } 415 416 static void test_xchg(void *mem) 417 { 418 unsigned long *memq = mem; 419 unsigned long rax; 420 421 asm volatile("mov $0x123456789abcdef, %%rax\n\t" 422 "mov %%rax, (%[memq])\n\t" 423 "mov $0xfedcba9876543210, %%rax\n\t" 424 "xchg %%al, (%[memq])\n\t" 425 "mov %%rax, %[rax]\n\t" 426 : [rax]"=r"(rax) 427 : [memq]"r"(memq) 428 : "memory", "rax"); 429 report(rax == 0xfedcba98765432ef && *memq == 0x123456789abcd10, 430 "xchg reg, r/m (1)"); 431 432 asm volatile("mov $0x123456789abcdef, %%rax\n\t" 433 "mov %%rax, (%[memq])\n\t" 434 "mov $0xfedcba9876543210, %%rax\n\t" 435 "xchg %%ax, (%[memq])\n\t" 436 "mov %%rax, %[rax]\n\t" 437 : [rax]"=r"(rax) 438 : [memq]"r"(memq) 439 : "memory", "rax"); 440 report(rax == 0xfedcba987654cdef && *memq == 0x123456789ab3210, 441 "xchg reg, r/m (2)"); 442 443 asm volatile("mov $0x123456789abcdef, %%rax\n\t" 444 "mov %%rax, (%[memq])\n\t" 445 "mov $0xfedcba9876543210, %%rax\n\t" 446 "xchg %%eax, (%[memq])\n\t" 447 "mov %%rax, %[rax]\n\t" 448 : [rax]"=r"(rax) 449 : [memq]"r"(memq) 450 : "memory", "rax"); 451 report(rax == 0x89abcdef && *memq == 0x123456776543210, 452 "xchg reg, r/m (3)"); 453 454 asm volatile("mov $0x123456789abcdef, %%rax\n\t" 455 "mov %%rax, (%[memq])\n\t" 456 "mov $0xfedcba9876543210, %%rax\n\t" 457 "xchg %%rax, (%[memq])\n\t" 458 "mov %%rax, %[rax]\n\t" 459 : [rax]"=r"(rax) 460 : [memq]"r"(memq) 461 : "memory", "rax"); 462 report(rax == 0x123456789abcdef && *memq == 0xfedcba9876543210, 463 "xchg reg, r/m (4)"); 464 } 465 466 static void test_xadd(void *mem) 467 { 468 unsigned long *memq = mem; 469 unsigned long rax; 470 471 asm volatile("mov $0x123456789abcdef, %%rax\n\t" 472 "mov %%rax, (%[memq])\n\t" 473 "mov $0xfedcba9876543210, %%rax\n\t" 474 "xadd %%al, (%[memq])\n\t" 475 "mov %%rax, %[rax]\n\t" 476 : [rax]"=r"(rax) 477 : [memq]"r"(memq) 478 : "memory", "rax"); 479 report(rax == 0xfedcba98765432ef && *memq == 0x123456789abcdff, 480 "xadd reg, r/m (1)"); 481 482 asm volatile("mov $0x123456789abcdef, %%rax\n\t" 483 "mov %%rax, (%[memq])\n\t" 484 "mov $0xfedcba9876543210, %%rax\n\t" 485 "xadd %%ax, (%[memq])\n\t" 486 "mov %%rax, %[rax]\n\t" 487 : [rax]"=r"(rax) 488 : [memq]"r"(memq) 489 : "memory", "rax"); 490 report(rax == 0xfedcba987654cdef && *memq == 0x123456789abffff, 491 "xadd reg, r/m (2)"); 492 493 asm volatile("mov $0x123456789abcdef, %%rax\n\t" 494 "mov %%rax, (%[memq])\n\t" 495 "mov $0xfedcba9876543210, %%rax\n\t" 496 "xadd %%eax, (%[memq])\n\t" 497 "mov %%rax, %[rax]\n\t" 498 : [rax]"=r"(rax) 499 : [memq]"r"(memq) 500 : "memory", "rax"); 501 report(rax == 0x89abcdef && *memq == 0x1234567ffffffff, 502 "xadd reg, r/m (3)"); 503 504 asm volatile("mov $0x123456789abcdef, %%rax\n\t" 505 "mov %%rax, (%[memq])\n\t" 506 "mov $0xfedcba9876543210, %%rax\n\t" 507 "xadd %%rax, (%[memq])\n\t" 508 "mov %%rax, %[rax]\n\t" 509 : [rax]"=r"(rax) 510 : [memq]"r"(memq) 511 : "memory", "rax"); 512 report(rax == 0x123456789abcdef && *memq == 0xffffffffffffffff, 513 "xadd reg, r/m (4)"); 514 } 515 516 static void test_btc(void *mem) 517 { 518 unsigned int *a = mem; 519 520 memset(mem, 0, 4 * sizeof(unsigned int)); 521 522 asm ("btcl $32, %0" :: "m"(a[0]) : "memory"); 523 asm ("btcl $1, %0" :: "m"(a[1]) : "memory"); 524 asm ("btcl %1, %0" :: "m"(a[0]), "r"(66) : "memory"); 525 report(a[0] == 1 && a[1] == 2 && a[2] == 4, "btcl imm8, r/m"); 526 527 asm ("btcl %1, %0" :: "m"(a[3]), "r"(-1) : "memory"); 528 report(a[0] == 1 && a[1] == 2 && a[2] == 0x80000004, "btcl reg, r/m"); 529 530 asm ("btcq %1, %0" : : "m"(a[2]), "r"(-1l) : "memory"); 531 report(a[0] == 1 && a[1] == 0x80000002 && a[2] == 0x80000004 && a[3] == 0, 532 "btcq reg, r/m"); 533 } 534 535 static void test_bsfbsr(void *mem) 536 { 537 unsigned long rax, *memq = mem; 538 unsigned eax, *meml = mem; 539 unsigned short ax, *memw = mem; 540 unsigned char z; 541 542 *memw = 0xc000; 543 asm("bsfw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw)); 544 report(ax == 14, "bsfw r/m, reg"); 545 546 *meml = 0xc0000000; 547 asm("bsfl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml)); 548 report(eax == 30, "bsfl r/m, reg"); 549 550 *memq = 0xc00000000000; 551 asm("bsfq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq)); 552 report(rax == 46, "bsfq r/m, reg"); 553 554 *memq = 0; 555 asm("bsfq %[mem], %[a]; setz %[z]" 556 : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq)); 557 report(z == 1, "bsfq r/m, reg"); 558 559 *memw = 0xc000; 560 asm("bsrw %[mem], %[a]" : [a]"=a"(ax) : [mem]"m"(*memw)); 561 report(ax == 15, "bsrw r/m, reg"); 562 563 *meml = 0xc0000000; 564 asm("bsrl %[mem], %[a]" : [a]"=a"(eax) : [mem]"m"(*meml)); 565 report(eax == 31, "bsrl r/m, reg"); 566 567 *memq = 0xc00000000000; 568 asm("bsrq %[mem], %[a]" : [a]"=a"(rax) : [mem]"m"(*memq)); 569 report(rax == 47, "bsrq r/m, reg"); 570 571 *memq = 0; 572 asm("bsrq %[mem], %[a]; setz %[z]" 573 : [a]"=a"(rax), [z]"=rm"(z) : [mem]"m"(*memq)); 574 report(z == 1, "bsrq r/m, reg"); 575 } 576 577 static void test_imul(ulong *mem) 578 { 579 ulong a; 580 581 *mem = 51; a = 0x1234567812345678UL; 582 asm ("imulw %1, %%ax" : "+a"(a) : "m"(*mem)); 583 report(a == 0x12345678123439e8, "imul ax, mem"); 584 585 *mem = 51; a = 0x1234567812345678UL; 586 asm ("imull %1, %%eax" : "+a"(a) : "m"(*mem)); 587 report(a == 0xa06d39e8, "imul eax, mem"); 588 589 *mem = 51; a = 0x1234567812345678UL; 590 asm ("imulq %1, %%rax" : "+a"(a) : "m"(*mem)); 591 report(a == 0xA06D39EBA06D39E8UL, "imul rax, mem"); 592 593 *mem = 0x1234567812345678UL; a = 0x8765432187654321L; 594 asm ("imulw $51, %1, %%ax" : "+a"(a) : "m"(*mem)); 595 report(a == 0x87654321876539e8, "imul ax, mem, imm8"); 596 597 *mem = 0x1234567812345678UL; 598 asm ("imull $51, %1, %%eax" : "+a"(a) : "m"(*mem)); 599 report(a == 0xa06d39e8, "imul eax, mem, imm8"); 600 601 *mem = 0x1234567812345678UL; 602 asm ("imulq $51, %1, %%rax" : "+a"(a) : "m"(*mem)); 603 report(a == 0xA06D39EBA06D39E8UL, "imul rax, mem, imm8"); 604 605 *mem = 0x1234567812345678UL; a = 0x8765432187654321L; 606 asm ("imulw $311, %1, %%ax" : "+a"(a) : "m"(*mem)); 607 report(a == 0x8765432187650bc8, "imul ax, mem, imm"); 608 609 *mem = 0x1234567812345678UL; 610 asm ("imull $311, %1, %%eax" : "+a"(a) : "m"(*mem)); 611 report(a == 0x1d950bc8, "imul eax, mem, imm"); 612 613 *mem = 0x1234567812345678UL; 614 asm ("imulq $311, %1, %%rax" : "+a"(a) : "m"(*mem)); 615 report(a == 0x1D950BDE1D950BC8L, "imul rax, mem, imm"); 616 } 617 618 static void test_muldiv(long *mem) 619 { 620 long a, d, aa, dd; 621 u8 ex = 1; 622 623 *mem = 0; a = 1; d = 2; 624 asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:" 625 : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem)); 626 report(a == 1 && d == 2 && ex, "divq (fault)"); 627 628 *mem = 987654321098765UL; a = 123456789012345UL; d = 123456789012345UL; 629 asm (ASM_TRY("1f") "divq %3; movb $0, %2; 1:" 630 : "+a"(a), "+d"(d), "+q"(ex) : "m"(*mem)); 631 report(a == 0x1ffffffb1b963b33ul && d == 0x273ba4384ede2ul && !ex, 632 "divq (1)"); 633 aa = 0x1111111111111111; dd = 0x2222222222222222; 634 *mem = 0x3333333333333333; a = aa; d = dd; 635 asm("mulb %2" : "+a"(a), "+d"(d) : "m"(*mem)); 636 report(a == 0x1111111111110363 && d == dd, "mulb mem"); 637 *mem = 0x3333333333333333; a = aa; d = dd; 638 asm("mulw %2" : "+a"(a), "+d"(d) : "m"(*mem)); 639 report(a == 0x111111111111c963 && d == 0x2222222222220369, "mulw mem"); 640 *mem = 0x3333333333333333; a = aa; d = dd; 641 asm("mull %2" : "+a"(a), "+d"(d) : "m"(*mem)); 642 report(a == 0x962fc963 && d == 0x369d036, "mull mem"); 643 *mem = 0x3333333333333333; a = aa; d = dd; 644 asm("mulq %2" : "+a"(a), "+d"(d) : "m"(*mem)); 645 report(a == 0x2fc962fc962fc963 && d == 0x369d0369d0369d0, "mulq mem"); 646 } 647 648 typedef unsigned __attribute__((vector_size(16))) sse128; 649 650 static bool sseeq(uint32_t *v1, uint32_t *v2) 651 { 652 bool ok = true; 653 int i; 654 655 for (i = 0; i < 4; ++i) { 656 ok &= v1[i] == v2[i]; 657 } 658 659 return ok; 660 } 661 662 static __attribute__((target("sse2"))) void test_sse(uint32_t *mem) 663 { 664 sse128 vv; 665 uint32_t *v = (uint32_t *)&vv; 666 667 write_cr0(read_cr0() & ~6); /* EM, TS */ 668 write_cr4(read_cr4() | 0x200); /* OSFXSR */ 669 memset(&vv, 0, sizeof(vv)); 670 671 #define TEST_RW_SSE(insn) do { \ 672 v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; \ 673 asm(insn " %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); \ 674 report(sseeq(v, mem), insn " (read)"); \ 675 mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8; \ 676 asm(insn " %1, %0" : "=x"(vv) : "m"(*mem) : "memory"); \ 677 report(sseeq(v, mem), insn " (write)"); \ 678 } while (0) 679 680 TEST_RW_SSE("movdqu"); 681 TEST_RW_SSE("movaps"); 682 TEST_RW_SSE("movapd"); 683 TEST_RW_SSE("movups"); 684 TEST_RW_SSE("movupd"); 685 #undef TEST_RW_SSE 686 } 687 688 static void unaligned_movaps_handler(struct ex_regs *regs) 689 { 690 extern char unaligned_movaps_cont; 691 692 ++exceptions; 693 regs->rip = (ulong)&unaligned_movaps_cont; 694 } 695 696 static void cross_movups_handler(struct ex_regs *regs) 697 { 698 extern char cross_movups_cont; 699 700 ++exceptions; 701 regs->rip = (ulong)&cross_movups_cont; 702 } 703 704 static __attribute__((target("sse2"))) void test_sse_exceptions(void *cross_mem) 705 { 706 sse128 vv; 707 uint32_t *v = (uint32_t *)&vv; 708 uint32_t *mem; 709 uint8_t *bytes = cross_mem; // aligned on PAGE_SIZE*2 710 void *page2 = (void *)(&bytes[4096]); 711 struct pte_search search; 712 pteval_t orig_pte; 713 handler old; 714 715 // setup memory for unaligned access 716 mem = (uint32_t *)(&bytes[8]); 717 718 // test unaligned access for movups, movupd and movaps 719 v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; 720 mem[0] = 5; mem[1] = 6; mem[2] = 8; mem[3] = 9; 721 asm("movups %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); 722 report(sseeq(v, mem), "movups unaligned"); 723 724 v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; 725 mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8; 726 asm("movupd %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); 727 report(sseeq(v, mem), "movupd unaligned"); 728 exceptions = 0; 729 old = handle_exception(GP_VECTOR, unaligned_movaps_handler); 730 asm("movaps %1, %0\n\t unaligned_movaps_cont:" 731 : "=m"(*mem) : "x"(vv)); 732 handle_exception(GP_VECTOR, old); 733 report(exceptions == 1, "unaligned movaps exception"); 734 735 // setup memory for cross page access 736 mem = (uint32_t *)(&bytes[4096-8]); 737 v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; 738 mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8; 739 740 asm("movups %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); 741 report(sseeq(v, mem), "movups unaligned crosspage"); 742 743 // invalidate second page 744 search = find_pte_level(current_page_table(), page2, 1); 745 orig_pte = *search.pte; 746 install_pte(current_page_table(), 1, page2, 0, NULL); 747 invlpg(page2); 748 749 exceptions = 0; 750 old = handle_exception(PF_VECTOR, cross_movups_handler); 751 asm("movups %1, %0\n\t cross_movups_cont:" : "=m"(*mem) : "x"(vv) : 752 "memory"); 753 handle_exception(PF_VECTOR, old); 754 report(exceptions == 1, "movups crosspage exception"); 755 756 // restore invalidated page 757 install_pte(current_page_table(), 1, page2, orig_pte, NULL); 758 } 759 760 static void test_mmx(uint64_t *mem) 761 { 762 uint64_t v; 763 764 write_cr0(read_cr0() & ~6); /* EM, TS */ 765 asm volatile("fninit"); 766 v = 0x0102030405060708ULL; 767 asm("movq %1, %0" : "=m"(*mem) : "y"(v)); 768 report(v == *mem, "movq (mmx, read)"); 769 *mem = 0x8070605040302010ull; 770 asm("movq %1, %0" : "=y"(v) : "m"(*mem)); 771 report(v == *mem, "movq (mmx, write)"); 772 } 773 774 static void test_rip_relative(unsigned *mem, char *insn_ram) 775 { 776 /* movb $1, mem+2(%rip) */ 777 insn_ram[0] = 0xc6; 778 insn_ram[1] = 0x05; 779 *(unsigned *)&insn_ram[2] = 2 + (char *)mem - (insn_ram + 7); 780 insn_ram[6] = 0x01; 781 /* ret */ 782 insn_ram[7] = 0xc3; 783 784 *mem = 0; 785 asm("callq *%1" : "+m"(*mem) : "r"(insn_ram)); 786 report(*mem == 0x10000, "movb $imm, 0(%%rip)"); 787 } 788 789 static void test_shld_shrd(u32 *mem) 790 { 791 *mem = 0x12345678; 792 asm("shld %2, %1, %0" : "+m"(*mem) : "r"(0xaaaaaaaaU), "c"((u8)3)); 793 report(*mem == ((0x12345678 << 3) | 5), "shld (cl)"); 794 *mem = 0x12345678; 795 asm("shrd %2, %1, %0" : "+m"(*mem) : "r"(0x55555555U), "c"((u8)3)); 796 report(*mem == ((0x12345678 >> 3) | (5u << 29)), "shrd (cl)"); 797 } 798 799 static void test_cmov(u32 *mem) 800 { 801 u64 val; 802 *mem = 0xabcdef12u; 803 asm ("movq $0x1234567812345678, %%rax\n\t" 804 "cmpl %%eax, %%eax\n\t" 805 "cmovnel (%[mem]), %%eax\n\t" 806 "movq %%rax, %[val]\n\t" 807 : [val]"=r"(val) : [mem]"r"(mem) : "%rax", "cc"); 808 report(val == 0x12345678ul, "cmovnel"); 809 } 810 811 static unsigned long rip_advance; 812 813 static void advance_rip_and_note_exception(struct ex_regs *regs) 814 { 815 ++exceptions; 816 regs->rip += rip_advance; 817 } 818 819 static void test_mmx_movq_mf(uint64_t *mem) 820 { 821 /* movq %mm0, (%rax) */ 822 extern char movq_start, movq_end; 823 handler old; 824 825 uint16_t fcw = 0; /* all exceptions unmasked */ 826 write_cr0(read_cr0() & ~6); /* TS, EM */ 827 exceptions = 0; 828 old = handle_exception(MF_VECTOR, advance_rip_and_note_exception); 829 asm volatile("fninit; fldcw %0" : : "m"(fcw)); 830 asm volatile("fldz; fldz; fdivp"); /* generate exception */ 831 832 rip_advance = &movq_end - &movq_start; 833 asm(KVM_FEP "movq_start: movq %mm0, (%rax); movq_end:"); 834 /* exit MMX mode */ 835 asm volatile("fnclex; emms"); 836 report(exceptions == 1, "movq mmx generates #MF"); 837 handle_exception(MF_VECTOR, old); 838 } 839 840 static void test_jmp_noncanonical(uint64_t *mem) 841 { 842 extern char nc_jmp_start, nc_jmp_end; 843 handler old; 844 845 *mem = 0x1111111111111111ul; 846 847 exceptions = 0; 848 rip_advance = &nc_jmp_end - &nc_jmp_start; 849 old = handle_exception(GP_VECTOR, advance_rip_and_note_exception); 850 asm volatile ("nc_jmp_start: jmp *%0; nc_jmp_end:" : : "m"(*mem)); 851 report(exceptions == 1, "jump to non-canonical address"); 852 handle_exception(GP_VECTOR, old); 853 } 854 855 static void test_movabs(uint64_t *mem) 856 { 857 /* mov $0x9090909090909090, %rcx */ 858 unsigned long rcx; 859 asm(KVM_FEP "mov $0x9090909090909090, %0" : "=c" (rcx) : "0" (0)); 860 report(rcx == 0x9090909090909090, "64-bit mov imm2"); 861 } 862 863 static void test_smsw_reg(uint64_t *mem) 864 { 865 unsigned long cr0 = read_cr0(); 866 unsigned long rax; 867 const unsigned long in_rax = 0x1234567890abcdeful; 868 869 asm(KVM_FEP "smsww %w0\n\t" : "=a" (rax) : "0" (in_rax)); 870 report((u16)rax == (u16)cr0 && rax >> 16 == in_rax >> 16, 871 "16-bit smsw reg"); 872 873 asm(KVM_FEP "smswl %k0\n\t" : "=a" (rax) : "0" (in_rax)); 874 report(rax == (u32)cr0, "32-bit smsw reg"); 875 876 asm(KVM_FEP "smswq %q0\n\t" : "=a" (rax) : "0" (in_rax)); 877 report(rax == cr0, "64-bit smsw reg"); 878 } 879 880 static void test_nop(uint64_t *mem) 881 { 882 unsigned long rax; 883 const unsigned long in_rax = 0x1234567890abcdeful; 884 asm(KVM_FEP "nop\n\t" : "=a" (rax) : "0" (in_rax)); 885 report(rax == in_rax, "nop"); 886 } 887 888 static void test_mov_dr(uint64_t *mem) 889 { 890 unsigned long rax; 891 892 asm(KVM_FEP "movq %0, %%dr6\n\t" 893 KVM_FEP "movq %%dr6, %0\n\t" : "=a" (rax) : "a" (0)); 894 895 if (this_cpu_has(X86_FEATURE_RTM)) 896 report(rax == (DR6_ACTIVE_LOW & ~DR6_RTM), "mov_dr6"); 897 else 898 report(rax == DR6_ACTIVE_LOW, "mov_dr6"); 899 } 900 901 static void test_push16(uint64_t *mem) 902 { 903 uint64_t rsp1, rsp2; 904 uint16_t r; 905 906 asm volatile ( "movq %%rsp, %[rsp1]\n\t" 907 "pushw %[v]\n\t" 908 "popw %[r]\n\t" 909 "movq %%rsp, %[rsp2]\n\t" 910 "movq %[rsp1], %%rsp\n\t" : 911 [rsp1]"=r"(rsp1), [rsp2]"=r"(rsp2), [r]"=r"(r) 912 : [v]"m"(*mem) : "memory"); 913 report(rsp1 == rsp2, "push16"); 914 } 915 916 static void test_crosspage_mmio(volatile uint8_t *mem) 917 { 918 volatile uint16_t w, *pw; 919 920 pw = (volatile uint16_t *)&mem[4095]; 921 mem[4095] = 0x99; 922 mem[4096] = 0x77; 923 asm volatile("mov %1, %0" : "=r"(w) : "m"(*pw) : "memory"); 924 report(w == 0x7799, "cross-page mmio read"); 925 asm volatile("mov %1, %0" : "=m"(*pw) : "r"((uint16_t)0x88aa)); 926 report(mem[4095] == 0xaa && mem[4096] == 0x88, "cross-page mmio write"); 927 } 928 929 static void test_string_io_mmio(volatile uint8_t *mem) 930 { 931 /* Cross MMIO pages.*/ 932 volatile uint8_t *mmio = mem + 4032; 933 934 asm volatile("outw %%ax, %%dx \n\t" : : "a"(0x9999), "d"(TESTDEV_IO_PORT)); 935 936 asm volatile ("cld; rep insb" : : "d" (TESTDEV_IO_PORT), "D" (mmio), "c" (1024)); 937 938 report(mmio[1023] == 0x99, "string_io_mmio"); 939 } 940 941 /* kvm doesn't allow lidt/lgdt from mmio, so the test is disabled */ 942 #if 0 943 static void test_lgdt_lidt(volatile uint8_t *mem) 944 { 945 struct descriptor_table_ptr orig, fresh = {}; 946 947 sgdt(&orig); 948 *(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) { 949 .limit = 0xf234, 950 .base = 0x12345678abcd, 951 }; 952 cli(); 953 asm volatile("lgdt %0" : : "m"(*(struct descriptor_table_ptr *)mem)); 954 sgdt(&fresh); 955 lgdt(&orig); 956 sti(); 957 report(orig.limit == fresh.limit && orig.base == fresh.base, 958 "lgdt (long address)"); 959 960 sidt(&orig); 961 *(struct descriptor_table_ptr *)mem = (struct descriptor_table_ptr) { 962 .limit = 0x432f, 963 .base = 0xdbca87654321, 964 }; 965 cli(); 966 asm volatile("lidt %0" : : "m"(*(struct descriptor_table_ptr *)mem)); 967 sidt(&fresh); 968 lidt(&orig); 969 sti(); 970 report(orig.limit == fresh.limit && orig.base == fresh.base, 971 "lidt (long address)"); 972 } 973 #endif 974 975 static void ss_bad_rpl(struct ex_regs *regs) 976 { 977 extern char ss_bad_rpl_cont; 978 979 ++exceptions; 980 regs->rip = (ulong)&ss_bad_rpl_cont; 981 } 982 983 static void test_sreg(volatile uint16_t *mem) 984 { 985 u16 ss = read_ss(); 986 handler old; 987 988 // check for null segment load 989 *mem = 0; 990 asm volatile("mov %0, %%ss" : : "m"(*mem)); 991 report(read_ss() == 0, "mov null, %%ss"); 992 993 // check for exception when ss.rpl != cpl on null segment load 994 exceptions = 0; 995 old = handle_exception(GP_VECTOR, ss_bad_rpl); 996 *mem = 3; 997 asm volatile("mov %0, %%ss; ss_bad_rpl_cont:" : : "m"(*mem)); 998 report(exceptions == 1 && read_ss() == 0, 999 "mov null, %%ss (with ss.rpl != cpl)"); 1000 handle_exception(GP_VECTOR, old); 1001 write_ss(ss); 1002 } 1003 1004 static uint64_t usr_gs_mov(void) 1005 { 1006 static uint64_t dummy = MAGIC_NUM; 1007 uint64_t dummy_ptr = (uint64_t)&dummy; 1008 uint64_t ret; 1009 1010 dummy_ptr -= GS_BASE; 1011 asm volatile("mov %%gs:(%%rcx), %%rax" : "=a"(ret): "c"(dummy_ptr) :); 1012 1013 return ret; 1014 } 1015 1016 static void test_iret(void) 1017 { 1018 uint64_t val; 1019 bool raised_vector; 1020 1021 /* Update GS base to 4MiB */ 1022 wrmsr(MSR_GS_BASE, GS_BASE); 1023 1024 /* 1025 * Per the SDM, jumping to user mode via `iret`, which is returning to 1026 * outer privilege level, for segment registers (ES, FS, GS, and DS) 1027 * if the check fails, the segment selector becomes null. 1028 * 1029 * In our test case, GS becomes null. 1030 */ 1031 val = run_in_user((usermode_func)usr_gs_mov, GP_VECTOR, 1032 0, 0, 0, 0, &raised_vector); 1033 1034 report(val == MAGIC_NUM, "Test ret/iret with a nullified segment"); 1035 } 1036 1037 /* Broken emulation causes triple fault, which skips the other tests. */ 1038 #if 0 1039 static void test_lldt(volatile uint16_t *mem) 1040 { 1041 u64 gdt[] = { 0, /* null descriptor */ 1042 #ifdef __X86_64__ 1043 0, /* ldt descriptor is 16 bytes in long mode */ 1044 #endif 1045 0x0000f82000000ffffull /* ldt descriptor */ }; 1046 struct descriptor_table_ptr gdt_ptr = { .limit = sizeof(gdt) - 1, 1047 .base = (ulong)&gdt }; 1048 struct descriptor_table_ptr orig_gdt; 1049 1050 cli(); 1051 sgdt(&orig_gdt); 1052 lgdt(&gdt_ptr); 1053 *mem = 0x8; 1054 asm volatile("lldt %0" : : "m"(*mem)); 1055 lgdt(&orig_gdt); 1056 sti(); 1057 report(sldt() == *mem, "lldt"); 1058 } 1059 #endif 1060 1061 static void test_ltr(volatile uint16_t *mem) 1062 { 1063 struct descriptor_table_ptr gdt_ptr; 1064 uint64_t *gdt, *trp; 1065 uint16_t tr = str(); 1066 uint64_t busy_mask = (uint64_t)1 << 41; 1067 1068 sgdt(&gdt_ptr); 1069 gdt = (uint64_t *)gdt_ptr.base; 1070 trp = &gdt[tr >> 3]; 1071 *trp &= ~busy_mask; 1072 *mem = tr; 1073 asm volatile("ltr %0" : : "m"(*mem) : "memory"); 1074 report(str() == tr && (*trp & busy_mask), "ltr"); 1075 } 1076 1077 static void test_simplealu(u32 *mem) 1078 { 1079 *mem = 0x1234; 1080 asm("or %1, %0" : "+m"(*mem) : "r"(0x8001)); 1081 report(*mem == 0x9235, "or"); 1082 asm("add %1, %0" : "+m"(*mem) : "r"(2)); 1083 report(*mem == 0x9237, "add"); 1084 asm("xor %1, %0" : "+m"(*mem) : "r"(0x1111)); 1085 report(*mem == 0x8326, "xor"); 1086 asm("sub %1, %0" : "+m"(*mem) : "r"(0x26)); 1087 report(*mem == 0x8300, "sub"); 1088 asm("clc; adc %1, %0" : "+m"(*mem) : "r"(0x100)); 1089 report(*mem == 0x8400, "adc(0)"); 1090 asm("stc; adc %1, %0" : "+m"(*mem) : "r"(0x100)); 1091 report(*mem == 0x8501, "adc(0)"); 1092 asm("clc; sbb %1, %0" : "+m"(*mem) : "r"(0)); 1093 report(*mem == 0x8501, "sbb(0)"); 1094 asm("stc; sbb %1, %0" : "+m"(*mem) : "r"(0)); 1095 report(*mem == 0x8500, "sbb(1)"); 1096 asm("and %1, %0" : "+m"(*mem) : "r"(0xfe77)); 1097 report(*mem == 0x8400, "and"); 1098 asm("test %1, %0" : "+m"(*mem) : "r"(0xf000)); 1099 report(*mem == 0x8400, "test"); 1100 } 1101 1102 static void illegal_movbe_handler(struct ex_regs *regs) 1103 { 1104 extern char bad_movbe_cont; 1105 1106 ++exceptions; 1107 regs->rip = (ulong)&bad_movbe_cont; 1108 } 1109 1110 static void test_illegal_movbe(void) 1111 { 1112 if (!this_cpu_has(X86_FEATURE_MOVBE)) { 1113 report_skip("illegal movbe"); 1114 return; 1115 } 1116 1117 exceptions = 0; 1118 handle_exception(UD_VECTOR, illegal_movbe_handler); 1119 asm volatile(".byte 0x0f; .byte 0x38; .byte 0xf0; .byte 0xc0;\n\t" 1120 " bad_movbe_cont:" : : : "rax"); 1121 report(exceptions == 1, "illegal movbe"); 1122 handle_exception(UD_VECTOR, 0); 1123 } 1124 1125 static void record_no_fep(struct ex_regs *regs) 1126 { 1127 fep_available = 0; 1128 regs->rip += KVM_FEP_LENGTH; 1129 } 1130 1131 int main(void) 1132 { 1133 void *mem; 1134 void *insn_page; 1135 void *insn_ram; 1136 void *cross_mem; 1137 unsigned long t1, t2; 1138 1139 setup_vm(); 1140 handle_exception(UD_VECTOR, record_no_fep); 1141 asm(KVM_FEP "nop"); 1142 handle_exception(UD_VECTOR, 0); 1143 1144 mem = alloc_vpages(2); 1145 install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem); 1146 // install the page twice to test cross-page mmio 1147 install_page((void *)read_cr3(), IORAM_BASE_PHYS, mem + 4096); 1148 insn_page = alloc_page(); 1149 insn_ram = vmap(virt_to_phys(insn_page), 4096); 1150 cross_mem = vmap(virt_to_phys(alloc_pages(2)), 2 * PAGE_SIZE); 1151 1152 // test mov reg, r/m and mov r/m, reg 1153 t1 = 0x123456789abcdef; 1154 asm volatile("mov %[t1], (%[mem]) \n\t" 1155 "mov (%[mem]), %[t2]" 1156 : [t2]"=r"(t2) 1157 : [t1]"r"(t1), [mem]"r"(mem) 1158 : "memory"); 1159 report(t2 == 0x123456789abcdef, "mov reg, r/m (1)"); 1160 1161 test_simplealu(mem); 1162 test_cmps(mem); 1163 test_scas(mem); 1164 1165 test_push(mem); 1166 test_pop(mem); 1167 1168 test_xchg(mem); 1169 test_xadd(mem); 1170 1171 test_cr8(); 1172 1173 test_smsw(mem); 1174 test_lmsw(); 1175 test_ljmp(mem); 1176 test_stringio(); 1177 test_incdecnotneg(mem); 1178 test_btc(mem); 1179 test_bsfbsr(mem); 1180 test_imul(mem); 1181 test_muldiv(mem); 1182 test_sse(mem); 1183 test_sse_exceptions(cross_mem); 1184 test_mmx(mem); 1185 test_rip_relative(mem, insn_ram); 1186 test_shld_shrd(mem); 1187 //test_lgdt_lidt(mem); 1188 test_sreg(mem); 1189 test_iret(); 1190 //test_lldt(mem); 1191 test_ltr(mem); 1192 test_cmov(mem); 1193 1194 if (fep_available) { 1195 test_mmx_movq_mf(mem); 1196 test_movabs(mem); 1197 test_smsw_reg(mem); 1198 test_nop(mem); 1199 test_mov_dr(mem); 1200 } else { 1201 report_skip("skipping register-only tests, " 1202 "use kvm.force_emulation_prefix=1 to enable"); 1203 } 1204 1205 test_push16(mem); 1206 test_crosspage_mmio(mem); 1207 1208 test_string_io_mmio(mem); 1209 1210 test_jmp_noncanonical(mem); 1211 test_illegal_movbe(); 1212 1213 return report_summary(); 1214 } 1215