Lines Matching +full:reserved +full:- +full:memory
18 * Get a linear address by combining @addr with a non-canonical pattern in the
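A minimal sketch of what such a helper plausibly does, assuming the pattern is simply folded into the upper bits (the name, mask, and 0xaaaa pattern below are illustrative, not taken from the file):

#include <stdint.h>

/* Keep @addr's low 48 bits and overwrite the top 16 with a pattern that
 * can never be a sign extension of bit 47, so the result is always
 * non-canonical on 48-bit-VA hardware. */
static inline uint64_t make_non_canonical(uint64_t addr)
{
	return (addr & ((1ull << 48) - 1)) | (0xaaaaull << 48);
}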
148 /* RESERVED 0 (3) */
151 /* RESERVED 0 (5) */
167 /* RESERVED 0 (15) */
239 * Each X86_FEATURE_XXX definition is 64-bit and contains the following
240 * CPUID meta-data:
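One plausible packing for that meta-data, with an assumed field layout (CPUID input EAX, input ECX, output register index, bit position); the real field order and widths may differ:

#include <stdint.h>

/* Assumed layout: [63:32] input EAX, [31:16] input ECX,
 * [15:8] output register, [7:0] bit position. Illustrative only. */
#define X86_FEATURE_SKETCH(eax, ecx, reg, bit)			\
	(((uint64_t)(eax) << 32) | ((uint64_t)(ecx) << 16) |	\
	 ((uint64_t)(reg) << 8) | (uint64_t)(bit))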
343 asm volatile (".byte 0x0f, 0x01, 0xca" : : : "memory"); in clac()
348 asm volatile (".byte 0x0f, 0x01, 0xcb" : : : "memory"); in stac()
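Those .byte sequences are the CLAC (0F 01 CA) and STAC (0F 01 CB) encodings, spelled out so the file assembles even where the toolchain predates SMAP. A hypothetical usage pattern (user_ptr and the surrounding test are illustrative):

#include <stdint.h>

/* Open a user-access window under SMAP, touch user memory, close it.
 * With SMAP enabled and EFLAGS.AC clear, the store would #PF instead. */
static inline void poke_user_byte(volatile uint8_t *user_ptr)
{
	stac();			/* set EFLAGS.AC */
	*user_ptr = 0xff;
	clac();			/* clear EFLAGS.AC */
}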
408 asm volatile ("mov %0, %%ds" : : "rm"(val) : "memory"); in write_ds()
413 asm volatile ("mov %0, %%es" : : "rm"(val) : "memory"); in write_es()
418 asm volatile ("mov %0, %%ss" : : "rm"(val) : "memory"); in write_ss()
423 asm volatile ("mov %0, %%fs" : : "rm"(val) : "memory"); in write_fs()
428 asm volatile ("mov %0, %%gs" : : "rm"(val) : "memory"); in write_gs()
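By analogy with the writers, the matching read accessors presumably look like this (a sketch, not copied from the file):

#include <stdint.h>

static inline uint16_t read_ds(void)
{
	uint16_t val;

	asm volatile ("mov %%ds, %0" : "=rm"(val));
	return val;
}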
445 * infrastructure uses per-CPU data and thus consumes GS.base. Various tests
452 asm volatile ("rdmsr" : "=a"(a), "=d"(d) : "c"(index) : "memory"); in rdmsr()
459 asm volatile ("wrmsr" : : "a"(a), "d"(d), "c"(index) : "memory"); in wrmsr()
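The enclosing rdmsr() presumably merges EDX:EAX into a single value, and the GS.base comment above implies tests should save/restore MSR_GS_BASE (0xc0000101) around anything that clobbers it. A sketch under those assumptions:

#include <stdint.h>

static inline uint64_t rdmsr_sketch(uint32_t index)
{
	uint32_t a, d;

	asm volatile ("rdmsr" : "=a"(a), "=d"(d) : "c"(index) : "memory");
	return a | ((uint64_t)d << 32);	/* RDMSR returns EDX:EAX */
}

A test that rewrites GS.base would then bracket the experiment with a read of MSR 0xc0000101 up front and a wrmsr() of the saved value afterwards, so the library's per-CPU data stays reachable.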
550 asm volatile ("mov %%cr0, %0" : "=r"(val) : : "memory"); in read_cr0()
556 asm volatile ("mov %0, %%cr2" : : "r"(val) : "memory"); in write_cr2()
562 asm volatile ("mov %%cr2, %0" : "=r"(val) : : "memory"); in read_cr2()
582 asm volatile ("mov %%cr3, %0" : "=r"(val) : : "memory"); in read_cr3()
607 asm volatile ("mov %%cr4, %0" : "=r"(val) : : "memory"); in read_cr4()
613 asm volatile ("mov %0, %%cr8" : : "r"(val) : "memory"); in write_cr8()
619 asm volatile ("mov %%cr8, %0" : "=r"(val) : : "memory"); in read_cr8()
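A typical read-modify-write on a control register, assuming a write_cr4() counterpart shaped like the other writers here (the CR4.DE bit is chosen purely for illustration):

static inline void write_cr4(unsigned long val)
{
	asm volatile ("mov %0, %%cr4" : : "r"(val) : "memory");
}

/* Illustrative: set CR4.DE (bit 3) using read_cr4() from above. */
static inline void cr4_set_de(void)
{
	write_cr4(read_cr4() | (1ul << 3));
}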
709 asm volatile ("mov %0, %%dr0" : : "r"(val) : "memory"); in write_dr0()
714 asm volatile ("mov %0, %%dr1" : : "r"(val) : "memory"); in write_dr1()
719 asm volatile ("mov %0, %%dr2" : : "r"(val) : "memory"); in write_dr2()
724 asm volatile ("mov %0, %%dr3" : : "r"(val) : "memory"); in write_dr3()
729 asm volatile ("mov %0, %%dr6" : : "r"(val) : "memory"); in write_dr6()
741 asm volatile ("mov %0, %%dr7" : : "r"(val) : "memory"); in write_dr7()
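One way these writers get used is arming a hardware breakpoint: point DR0 at the target, then set DR7.L0. A sketch assuming a read_dr7() counterpart exists and that write_dr0() takes the linear address (R/W0 = LEN0 = 0 selects a 1-byte execute breakpoint):

static inline void arm_exec_bp0(unsigned long addr)
{
	write_dr0(addr);		/* breakpoint linear address */
	write_dr7(read_dr7() | 0x1);	/* L0: locally enable DR0 */
}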
859 asm volatile("invlpg (%0)" ::"r" (va) : "memory"); in invlpg()
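invlpg() drops the TLB entry for a single linear address, so the natural pairing is with a page-table update (the helper below is hypothetical):

#include <stdint.h>

/* Publish a new PTE, then flush the stale translation for @va only. */
static inline void update_pte(uint64_t *ptep, uint64_t new_pte, void *va)
{
	*ptep = new_pte;
	invlpg(va);
}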
912 int shift_amt = 64 - va_width; in is_canonical()
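That shift amount is the usual sign-extension trick: an address is canonical iff shifting it up by (64 - va_width) and arithmetic-shifting back down reproduces it. A sketch of the full check under that assumption:

#include <stdbool.h>
#include <stdint.h>

static inline bool is_canonical_sketch(uint64_t va, int va_width)
{
	int shift_amt = 64 - va_width;

	/* Canonical iff bits [63:va_width] sign-extend bit (va_width - 1). */
	return (int64_t)(va << shift_amt) >> shift_amt == (int64_t)va;
}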
920 : "+m" (*addr) : "Ir" (bit) : "cc", "memory"); in clear_bit()
926 : "+m" (*addr) : "Ir" (bit) : "cc", "memory"); in set_bit()
972 * Trigger an #AC by writing 8 bytes to a 4-byte aligned address. in generate_usermode_ac()
974 * on a 16-byte boundary as x86_64 stacks should be. in generate_usermode_ac()
976 asm volatile("movq $0, -0x4(%rsp)"); in generate_usermode_ac()
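For that store to actually raise #AC, alignment checking must be armed first: CR0.AM = 1, EFLAGS.AC = 1, and CPL = 3. A sketch of the arming (write_cr0() is an assumed counterpart to read_cr0() above):

/* Conveniently, CR0.AM and EFLAGS.AC are both bit 18 (0x40000). */
write_cr0(read_cr0() | 0x40000);			/* CR0.AM */
asm volatile ("pushf; orq $0x40000, (%rsp); popf");	/* EFLAGS.AC */
/* ...then, at CPL=3, the 8-byte store to RSP-4 faults with #AC. */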
982 * Switch from 64-bit to 32-bit mode and generate #OF via INTO. Note, if RIP
983 * or RSP holds a 64-bit value, this helper will NOT generate #OF.
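INTO (opcode CE) is not encodable in 64-bit mode, which is why the helper drops to 32-bit code first. Once there, raising #OF is just setting EFLAGS.OF and executing INTO; a 32-bit-only sketch:

/* Valid only while executing 32-bit code: 0x7f + 1 overflows a signed
 * byte, setting OF, so INTO raises #OF (vector 4). */
asm volatile ("movb $0x7f, %%al\n\t"
	      "addb $1, %%al\n\t"
	      "into"
	      : : : "eax", "cc");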