Lines matching full:1 (full-text search for "1"):

29 		       "S"(st1), "c"(sizeof(st1) - 1));  in test_stringio()
30 asm volatile("inb %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT)); in test_stringio()
37 "S"(st1 + sizeof(st1) - 2), "c"(sizeof(st1) - 1)); in test_stringio()
39 asm volatile("in %1, %0\n\t" : "=a"(r) : "i"((short)TESTDEV_IO_PORT)); in test_stringio()
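
The test_stringio() hits above drive "rep outsb"/"in" against TESTDEV_IO_PORT inside the guest. Below is a rough user-space analogue of the same operand pattern, not the test harness itself; it assumes a Linux host where ioperm() is usable (root/CAP_SYS_RAWIO) and substitutes the traditional diagnostic port 0x80 for the test device port.

    /*
     * Stand-alone sketch of the "rep outsb" operand pattern; port 0x80 and
     * ioperm() are substitutions, not part of the original test.
     */
    #include <assert.h>
    #include <stdio.h>
    #include <sys/io.h>

    int main(void)
    {
        static const char st1[] = "abcdefghijklmnop";
        const char *src = st1;
        unsigned long count = sizeof(st1) - 1;

        if (ioperm(0x80, 1, 1)) {
            perror("ioperm");
            return 1;
        }

        /* write RCX bytes from DS:RSI to the port in DX */
        asm volatile("rep outsb"
                     : "+S"(src), "+c"(count)
                     : "d"((unsigned short)0x80)
                     : "memory");

        assert(count == 0 && src == st1 + sizeof(st1) - 1);
        printf("wrote %zu bytes to port 0x80\n", sizeof(st1) - 1);
        return 0;
    }
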
53 report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe/cmpsb (1)"); in test_cmps_one()
56 asm volatile("or $1, %[tmp]\n\t" // clear ZF in test_cmps_one()
61 "repe cmpsb (1.zf)"); in test_cmps_one()
68 report(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30, "repe cmpsw (1)"); in test_cmps_one()
75 report(rcx == 0 && rsi == m1 + 28 && rdi == m3 + 28, "repe cmpsl (1)"); in test_cmps_one()
83 report(rcx == 0 && rsi == m1 + 32 && rdi == m3 + 32, "repe cmpsq (1)"); in test_cmps_one()
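
The test_cmps_one() lines verify where RCX/RSI/RDI end up after a repe cmps over two equal buffers. A minimal stand-alone sketch of the byte-sized case, using assert() in place of the harness's report():

    #include <assert.h>
    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
        unsigned char m1[64], m3[64];
        unsigned char *rsi = m1, *rdi = m3;
        unsigned long rcx = 30;

        memset(m1, 0xaa, sizeof(m1));
        memcpy(m3, m1, sizeof(m3));

        /* equal buffers: repeat until RCX reaches 0, ZF stays set */
        asm volatile("repe cmpsb"
                     : "+S"(rsi), "+D"(rdi), "+c"(rcx)
                     : : "cc", "memory");

        assert(rcx == 0 && rsi == m1 + 30 && rdi == m3 + 30);
        puts("repe cmpsb: registers advanced past the compared region");
        return 0;
    }
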
129 m1[i] = (m3[i] = m2[i] = i) + 1; in test_cmps()
143 report(di == mem + 1 && z, "scasb match"); in test_scas()
147 report(di == mem + 1 && !z, "scasb mismatch"); in test_scas()
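
The test_scas() lines check that scasb advances RDI by one whether or not AL matched, with ZF recording the comparison. A small stand-alone sketch of that probe, capturing the flag with sete:

    #include <assert.h>
    #include <stdio.h>

    int main(void)
    {
        unsigned char mem[4] = { 0x77, 0, 0, 0 };
        unsigned char *rdi = mem;
        unsigned char zf;

        asm volatile("scasb; sete %1"
                     : "+D"(rdi), "=q"(zf)
                     : "a"(0x77)
                     : "cc", "memory");
        assert(zf && rdi == mem + 1);        /* AL matched mem[0] */

        rdi = mem;
        asm volatile("scasb; sete %1"
                     : "+D"(rdi), "=q"(zf)
                     : "a"(0x78)
                     : "cc", "memory");
        assert(!zf && rdi == mem + 1);       /* mismatch still advances RDI */

        puts("scasb match/mismatch ok");
        return 0;
    }
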
184 report(*m == 1, "incl"); in test_incdecnotneg()
188 report(*m == 1, "incb"); in test_incdecnotneg()
193 report(*m == 1, "lock incl"); in test_incdecnotneg()
197 report(*m == 1, "lock incb"); in test_incdecnotneg()
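
The test_incdecnotneg() lines cover plain and lock-prefixed inc in several operand sizes, each expected to turn 0 into 1. A stand-alone sketch of the same forms:

    #include <assert.h>
    #include <stdio.h>

    int main(void)
    {
        int m = 0;
        unsigned char b = 0;

        asm("incl %0" : "+m"(m) : : "cc");
        assert(m == 1);

        m = 0;
        asm("lock incl %0" : "+m"(m) : : "cc");   /* atomic read-modify-write */
        assert(m == 1);

        asm("lock incb %0" : "+m"(b) : : "cc");
        assert(b == 1);

        puts("inc variants ok");
        return 0;
    }
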
227 report(msw == msw_orig, "smsw (1)"); in test_smsw()
232 zero = 1; in test_smsw()
239 *h_mem = -1ul; in test_smsw()
242 (*h_mem & ~0xfffful) == (-1ul & ~0xfffful), "smsw (3)"); in test_smsw()
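
The test_smsw() lines store the machine status word to a register and to memory. smsw is unprivileged (though with CR4.UMIP set the kernel may emulate it), so a user-space sketch can at least confirm that CR0.PE is reported in both forms; this is a sketch under that assumption, not the test itself:

    #include <assert.h>
    #include <stdio.h>

    int main(void)
    {
        unsigned long msw_reg;
        unsigned short msw_mem;

        asm volatile("smsw %0" : "=r"(msw_reg));
        asm volatile("smsw %0" : "=m"(msw_mem));

        assert(msw_reg & 1);        /* CR0.PE is set in protected/long mode */
        assert(msw_mem & 1);
        printf("smsw: reg=%#lx mem=%#hx\n", msw_reg, msw_mem);
        return 0;
    }
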
256 report((cr0 ^ read_cr0()) == 8, "lmsw (1)"); in test_lmsw()
265 msw = (cr0 & ~1ul) ^ 4; /* change EM to force trap */ in test_lmsw()
267 report((cr0 ^ read_cr0()) == 4 && (cr0 & 1), "lmsw (3)"); in test_lmsw()
281 asm ("btcl $1, %0" :: "m"(a[1]) : "memory"); in test_btc()
282 asm ("btcl %1, %0" :: "m"(a[0]), "r"(66) : "memory"); in test_btc()
283 report(a[0] == 1 && a[1] == 2 && a[2] == 4, "btcl imm8, r/m"); in test_btc()
285 asm ("btcl %1, %0" :: "m"(a[3]), "r"(-1) : "memory"); in test_btc()
286 report(a[0] == 1 && a[1] == 2 && a[2] == 0x80000004, "btcl reg, r/m"); in test_btc()
289 asm ("btcq %1, %0" : : "m"(a[2]), "r"(-1l) : "memory"); in test_btc()
290 report(a[0] == 1 && a[1] == 0x80000002 && a[2] == 0x80000004 && a[3] == 0, in test_btc()
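
The test_btc() lines depend on btc's register-offset form addressing bits outside the named dword: offset 66 applied to a[0] lands in bit 2 of a[2], and offset -1 applied to a[3] lands in the top bit of a[2]. A stand-alone sketch of just those two cases:

    #include <assert.h>
    #include <stdio.h>

    int main(void)
    {
        unsigned int a[4] = { 0, 0, 0, 0 };

        /* bit 66 relative to &a[0] is bit 2 of a[2] */
        asm volatile("btcl %1, %0" : : "m"(a[0]), "r"(66) : "cc", "memory");
        assert(a[0] == 0 && a[1] == 0 && a[2] == 4 && a[3] == 0);

        /* bit -1 relative to &a[3] is bit 31 of a[2] */
        asm volatile("btcl %1, %0" : : "m"(a[3]), "r"(-1) : "cc", "memory");
        assert(a[2] == 0x80000004 && a[3] == 0);

        puts("btcl register-offset addressing ok");
        return 0;
    }
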
320 report(z == 1, "bsfq r/m, reg"); in test_bsfbsr()
339 report(z == 1, "bsrq r/m, reg"); in test_bsfbsr()
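
The test_bsfbsr() lines check bsf/bsr results against known bit positions. A minimal sketch of the 64-bit memory-operand forms:

    #include <assert.h>
    #include <stdio.h>

    int main(void)
    {
        unsigned long mem = 0xaa;    /* bits 1, 3, 5, 7 set */
        unsigned long lo, hi;

        asm("bsfq %1, %0" : "=r"(lo) : "m"(mem) : "cc");
        asm("bsrq %1, %0" : "=r"(hi) : "m"(mem) : "cc");

        assert(lo == 1 && hi == 7);
        printf("bsf=%lu bsr=%lu\n", lo, hi);
        return 0;
    }
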
347 *mem = 51; a = 0x1234567812345678ULL & -1ul; in test_imul()
348 asm ("imulw %1, %%ax" : "+a"(a) : "m"(*mem)); in test_imul()
349 report(a == (0x12345678123439e8ULL & -1ul), "imul ax, mem"); in test_imul()
351 *mem = 51; a = 0x1234567812345678ULL & -1ul; in test_imul()
352 asm ("imull %1, %%eax" : "+a"(a) : "m"(*mem)); in test_imul()
355 *mem = 0x1234567812345678ULL; a = 0x8765432187654321ULL & -1ul; in test_imul()
356 asm ("imulw $51, %1, %%ax" : "+a"(a) : "m"(*mem)); in test_imul()
357 report(a == (0x87654321876539e8ULL & -1ul), "imul ax, mem, imm8"); in test_imul()
360 asm ("imull $51, %1, %%eax" : "+a"(a) : "m"(*mem)); in test_imul()
363 *mem = 0x1234567812345678ULL; a = 0x8765432187654321ULL & -1ul; in test_imul()
364 asm ("imulw $311, %1, %%ax" : "+a"(a) : "m"(*mem)); in test_imul()
365 report(a == (0x8765432187650bc8ULL & -1ul), "imul ax, mem, imm"); in test_imul()
368 asm ("imull $311, %1, %%eax" : "+a"(a) : "m"(*mem)); in test_imul()
373 asm ("imulq %1, %%rax" : "+a"(a) : "m"(*mem)); in test_imul()
377 asm ("imulq $51, %1, %%rax" : "+a"(a) : "m"(*mem)); in test_imul()
381 asm ("imulq $311, %1, %%rax" : "+a"(a) : "m"(*mem)); in test_imul()
408 v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; \ in test_sse()
409 asm(insn " %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); \ in test_sse()
411 mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8; \ in test_sse()
412 asm(insn " %1, %0" : "=x"(vv) : "m"(*mem) : "memory"); \ in test_sse()
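
The test_sse() lines come from a macro that stores an XMM register to memory and loads it back with the same mnemonic. A stand-alone sketch of that round trip for movups, using a GCC vector type to satisfy the "x" (XMM) constraint:

    #include <assert.h>
    #include <stdio.h>

    typedef unsigned int v4si __attribute__((vector_size(16)));

    int main(void)
    {
        v4si vv = { 1, 2, 3, 4 };
        unsigned int mem[4] = { 0, 0, 0, 0 };

        asm("movups %1, %0" : "=m"(*mem) : "x"(vv) : "memory");    /* store */
        assert(mem[0] == 1 && mem[1] == 2 && mem[2] == 3 && mem[3] == 4);

        mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8;
        asm("movups %1, %0" : "=x"(vv) : "m"(*mem) : "memory");    /* load */
        assert(vv[0] == 5 && vv[1] == 6 && vv[2] == 7 && vv[3] == 8);

        puts("movups round trip ok");
        return 0;
    }
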
455 v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; in test_sse_exceptions()
456 mem[0] = 5; mem[1] = 6; mem[2] = 8; mem[3] = 9; in test_sse_exceptions()
457 asm("movups %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); in test_sse_exceptions()
460 v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; in test_sse_exceptions()
461 mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8; in test_sse_exceptions()
462 asm("movupd %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); in test_sse_exceptions()
466 asm("movaps %1, %0\n\t unaligned_movaps_cont:" in test_sse_exceptions()
469 report(exceptions == 1, "unaligned movaps exception"); in test_sse_exceptions()
473 v[0] = 1; v[1] = 2; v[2] = 3; v[3] = 4; in test_sse_exceptions()
474 mem[0] = 5; mem[1] = 6; mem[2] = 7; mem[3] = 8; in test_sse_exceptions()
476 asm("movups %1, %0" : "=m"(*mem) : "x"(vv) : "memory"); in test_sse_exceptions()
480 search = find_pte_level(current_page_table(), page2, 1); in test_sse_exceptions()
482 install_pte(current_page_table(), 1, page2, 0, NULL); in test_sse_exceptions()
487 asm("movups %1, %0\n\t cross_movups_cont:" : "=m"(*mem) : "x"(vv) : in test_sse_exceptions()
490 report(exceptions == 1, "movups crosspage exception"); in test_sse_exceptions()
493 install_pte(current_page_table(), 1, page2, orig_pte, NULL); in test_sse_exceptions()
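
The test_sse_exceptions() lines arrange for movaps to fault on a misaligned operand and for movups to fault across an unmapped page, counting the exceptions. Outside the test harness the misaligned #GP arrives in user space as SIGSEGV, so a stand-alone sketch of the alignment check can recover with sigsetjmp/siglongjmp instead of the test's handler (an assumption about host delivery, not part of the test):

    #include <assert.h>
    #include <setjmp.h>
    #include <signal.h>
    #include <stdio.h>

    typedef unsigned int v4si __attribute__((vector_size(16)));

    static sigjmp_buf env;
    static volatile sig_atomic_t faults;

    static void on_segv(int sig)
    {
        (void)sig;
        faults++;
        siglongjmp(env, 1);
    }

    int main(void)
    {
        static unsigned int buf[8] __attribute__((aligned(16)));
        v4si vv = { 1, 2, 3, 4 };

        signal(SIGSEGV, on_segv);
        if (!sigsetjmp(env, 1))
            asm volatile("movaps %1, %0"
                         : "=m"(buf[1])         /* deliberately misaligned */
                         : "x"(vv) : "memory");

        assert(faults == 1);
        puts("unaligned movaps faulted as expected");
        return 0;
    }
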
499 asm("shld %2, %1, %0" : "+m"(*mem) : "r"(0xaaaaaaaaU), "c"((u8)3)); in test_shld_shrd()
502 asm("shrd %2, %1, %0" : "+m"(*mem) : "r"(0x55555555U), "c"((u8)3)); in test_shld_shrd()
510 const unsigned long in_rax = 0x1234567890abcdefull & -1ul; in test_smsw_reg()
550 asm volatile (ASM_TRY_FEP("1f") in test_illegal_lea()
552 "1:" in test_illegal_lea()
567 asm volatile("mov %1, %0" : "=r"(w) : "m"(*pw) : "memory"); in test_crosspage_mmio()
569 asm volatile("mov %1, %0" : "=m"(*pw) : "r"((uint16_t)0x88aa)); in test_crosspage_mmio()
627 struct descriptor_table_ptr gdt_ptr = { .limit = sizeof(gdt) - 1,
647 uint64_t busy_mask = (uint64_t)1 << 41; in test_ltr()
663 t1 = 0x123456789abcdefull & -1ul; in test_mov()
669 report(t2 == (0x123456789abcdefull & -1ul), "mov reg, r/m (1)"); in test_mov()
675 asm("or %1, %0" : "+m"(*mem) : "r"(0x8001)); in test_simplealu()
677 asm("add %1, %0" : "+m"(*mem) : "r"(2)); in test_simplealu()
679 asm("xor %1, %0" : "+m"(*mem) : "r"(0x1111)); in test_simplealu()
681 asm("sub %1, %0" : "+m"(*mem) : "r"(0x26)); in test_simplealu()
683 asm("clc; adc %1, %0" : "+m"(*mem) : "r"(0x100)); in test_simplealu()
685 asm("stc; adc %1, %0" : "+m"(*mem) : "r"(0x100)); in test_simplealu()
687 asm("clc; sbb %1, %0" : "+m"(*mem) : "r"(0)); in test_simplealu()
689 asm("stc; sbb %1, %0" : "+m"(*mem) : "r"(0)); in test_simplealu()
690 report(*mem == 0x8500, "sbb(1)"); in test_simplealu()
691 asm("and %1, %0" : "+m"(*mem) : "r"(0xfe77)); in test_simplealu()
693 asm("test %1, %0" : "+m"(*mem) : "r"(0xf000)); in test_simplealu()
706 asm volatile(ASM_TRY("1f") in test_illegal_movbe()
708 "1:" in test_illegal_movbe()
732 int nr_expected = is_intel() ? 0 : 1; in test_mov_pop_ss_code_db()
744 asm volatile("lea 1f " RIP_RELATIVE ", %0\n\t" \ in test_mov_pop_ss_code_db()
748 fep2 "1: xor %0, %0\n\t" \ in test_mov_pop_ss_code_db()
795 "use kvm.force_emulation_prefix=1 to enable"); in main()