#ifndef _X86_PROCESSOR_H_
#define _X86_PROCESSOR_H_

#include "libcflat.h"
#include "desc.h"
#include "msr.h"
#include <stdint.h>

#define NONCANONICAL            0xaaaaaaaaaaaaaaaaull

#ifdef __x86_64__
# define R "r"
# define W "q"
# define S "8"
#else
# define R "e"
# define W "l"
# define S "4"
#endif

#define DB_VECTOR 1
#define BP_VECTOR 3
#define UD_VECTOR 6
#define DF_VECTOR 8
#define TS_VECTOR 10
#define NP_VECTOR 11
#define SS_VECTOR 12
#define GP_VECTOR 13
#define PF_VECTOR 14
#define AC_VECTOR 17
#define CP_VECTOR 21

#define X86_CR0_PE        0x00000001
#define X86_CR0_MP        0x00000002
#define X86_CR0_EM        0x00000004
#define X86_CR0_TS        0x00000008
#define X86_CR0_WP        0x00010000
#define X86_CR0_AM        0x00040000
#define X86_CR0_NW        0x20000000
#define X86_CR0_CD        0x40000000
#define X86_CR0_PG        0x80000000
#define X86_CR3_PCID_MASK 0x00000fff
#define X86_CR4_TSD       0x00000004
#define X86_CR4_DE        0x00000008
#define X86_CR4_PSE       0x00000010
#define X86_CR4_PAE       0x00000020
#define X86_CR4_MCE       0x00000040
#define X86_CR4_PGE       0x00000080
#define X86_CR4_PCE       0x00000100
#define X86_CR4_UMIP      0x00000800
#define X86_CR4_LA57      0x00001000
#define X86_CR4_VMXE      0x00002000
#define X86_CR4_PCIDE     0x00020000
#define X86_CR4_OSXSAVE   0x00040000
#define X86_CR4_SMEP      0x00100000
#define X86_CR4_SMAP      0x00200000
#define X86_CR4_PKE       0x00400000
#define X86_CR4_CET       0x00800000
#define X86_CR4_PKS       0x01000000

#define X86_EFLAGS_CF     0x00000001
#define X86_EFLAGS_FIXED  0x00000002
#define X86_EFLAGS_PF     0x00000004
#define X86_EFLAGS_AF     0x00000010
#define X86_EFLAGS_ZF     0x00000040
#define X86_EFLAGS_SF     0x00000080
#define X86_EFLAGS_TF     0x00000100
#define X86_EFLAGS_IF     0x00000200
#define X86_EFLAGS_DF     0x00000400
#define X86_EFLAGS_OF     0x00000800
#define X86_EFLAGS_IOPL   0x00003000
#define X86_EFLAGS_NT     0x00004000
#define X86_EFLAGS_RF     0x00010000
#define X86_EFLAGS_VM     0x00020000
#define X86_EFLAGS_AC     0x00040000

#define X86_EFLAGS_ALU (X86_EFLAGS_CF | X86_EFLAGS_PF | X86_EFLAGS_AF | \
                        X86_EFLAGS_ZF | X86_EFLAGS_SF | X86_EFLAGS_OF)


/*
 * CPU features
 */

enum cpuid_output_regs {
        EAX,
        EBX,
        ECX,
        EDX
};

struct cpuid { u32 a, b, c, d; };

static inline struct cpuid raw_cpuid(u32 function, u32 index)
{
        struct cpuid r;
        asm volatile ("cpuid"
                      : "=a"(r.a), "=b"(r.b), "=c"(r.c), "=d"(r.d)
                      : "0"(function), "2"(index));
        return r;
}

static inline struct cpuid cpuid_indexed(u32 function, u32 index)
{
        u32 level = raw_cpuid(function & 0xf0000000, 0).a;
        if (level < function)
                return (struct cpuid) { 0, 0, 0, 0 };
        return raw_cpuid(function, index);
}

static inline struct cpuid cpuid(u32 function)
{
        return cpuid_indexed(function, 0);
}

static inline u8 cpuid_maxphyaddr(void)
{
        if (raw_cpuid(0x80000000, 0).a < 0x80000008)
                return 36;
        return raw_cpuid(0x80000008, 0).a & 0xff;
}

static inline bool is_intel(void)
{
        struct cpuid c = cpuid(0);
        u32 name[4] = {c.b, c.d, c.c };

        return strcmp((char *)name, "GenuineIntel") == 0;
}

#define CPUID(a, b, c, d) ((((unsigned long long) a) << 32) | (b << 16) | \
                           (c << 8) | d)

/*
 * Each X86_FEATURE_XXX definition is 64-bit and contains the following
 * CPUID meta-data:
 *
 *      [63:32] :  input value for EAX
 *      [31:16] :  input value for ECX
 *      [15:8]  :  output register
 *      [7:0]   :  bit position in output register
 */
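
/*
 * Worked example of the packing above (editor's illustration, not part of
 * the original API): X86_FEATURE_XSAVE expands to CPUID(0x1, 0, ECX, 26),
 * which places the EAX input 0x1 in bits [63:32], the ECX input 0 in bits
 * [31:16], the output register index ECX (2) in bits [15:8] and the bit
 * position 26 in bits [7:0].  this_cpu_has() undoes this packing at run
 * time before issuing the CPUID instruction.
 */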

/*
 * Basic Leaves, a.k.a. Intel defined
 */
#define X86_FEATURE_MWAIT               (CPUID(0x1, 0, ECX, 3))
#define X86_FEATURE_VMX                 (CPUID(0x1, 0, ECX, 5))
#define X86_FEATURE_PCID                (CPUID(0x1, 0, ECX, 17))
#define X86_FEATURE_MOVBE               (CPUID(0x1, 0, ECX, 22))
#define X86_FEATURE_TSC_DEADLINE_TIMER  (CPUID(0x1, 0, ECX, 24))
#define X86_FEATURE_XSAVE               (CPUID(0x1, 0, ECX, 26))
#define X86_FEATURE_OSXSAVE             (CPUID(0x1, 0, ECX, 27))
#define X86_FEATURE_RDRAND              (CPUID(0x1, 0, ECX, 30))
#define X86_FEATURE_MCE                 (CPUID(0x1, 0, EDX, 7))
#define X86_FEATURE_APIC                (CPUID(0x1, 0, EDX, 9))
#define X86_FEATURE_CLFLUSH             (CPUID(0x1, 0, EDX, 19))
#define X86_FEATURE_XMM                 (CPUID(0x1, 0, EDX, 25))
#define X86_FEATURE_XMM2                (CPUID(0x1, 0, EDX, 26))
#define X86_FEATURE_TSC_ADJUST          (CPUID(0x7, 0, EBX, 1))
#define X86_FEATURE_HLE                 (CPUID(0x7, 0, EBX, 4))
#define X86_FEATURE_SMEP                (CPUID(0x7, 0, EBX, 7))
#define X86_FEATURE_INVPCID             (CPUID(0x7, 0, EBX, 10))
#define X86_FEATURE_RTM                 (CPUID(0x7, 0, EBX, 11))
#define X86_FEATURE_SMAP                (CPUID(0x7, 0, EBX, 20))
#define X86_FEATURE_PCOMMIT             (CPUID(0x7, 0, EBX, 22))
#define X86_FEATURE_CLFLUSHOPT          (CPUID(0x7, 0, EBX, 23))
#define X86_FEATURE_CLWB                (CPUID(0x7, 0, EBX, 24))
#define X86_FEATURE_UMIP                (CPUID(0x7, 0, ECX, 2))
#define X86_FEATURE_PKU                 (CPUID(0x7, 0, ECX, 3))
#define X86_FEATURE_LA57                (CPUID(0x7, 0, ECX, 16))
#define X86_FEATURE_RDPID               (CPUID(0x7, 0, ECX, 22))
#define X86_FEATURE_SHSTK               (CPUID(0x7, 0, ECX, 7))
#define X86_FEATURE_IBT                 (CPUID(0x7, 0, EDX, 20))
#define X86_FEATURE_SPEC_CTRL           (CPUID(0x7, 0, EDX, 26))
#define X86_FEATURE_ARCH_CAPABILITIES   (CPUID(0x7, 0, EDX, 29))
#define X86_FEATURE_PKS                 (CPUID(0x7, 0, ECX, 31))

/*
 * Extended Leaves, a.k.a. AMD defined
 */
#define X86_FEATURE_SVM                 (CPUID(0x80000001, 0, ECX, 2))
#define X86_FEATURE_NX                  (CPUID(0x80000001, 0, EDX, 20))
#define X86_FEATURE_GBPAGES             (CPUID(0x80000001, 0, EDX, 26))
#define X86_FEATURE_RDTSCP              (CPUID(0x80000001, 0, EDX, 27))
#define X86_FEATURE_LM                  (CPUID(0x80000001, 0, EDX, 29))
#define X86_FEATURE_RDPRU               (CPUID(0x80000008, 0, EBX, 4))
#define X86_FEATURE_AMD_IBPB            (CPUID(0x80000008, 0, EBX, 12))
#define X86_FEATURE_NPT                 (CPUID(0x8000000A, 0, EDX, 0))
#define X86_FEATURE_NRIPS               (CPUID(0x8000000A, 0, EDX, 3))
#define X86_FEATURE_VGIF                (CPUID(0x8000000A, 0, EDX, 16))


static inline bool this_cpu_has(u64 feature)
{
        u32 input_eax = feature >> 32;
        u32 input_ecx = (feature >> 16) & 0xffff;
        u32 output_reg = (feature >> 8) & 0xff;
        u8 bit = feature & 0xff;
        struct cpuid c;
        u32 *tmp;

        c = cpuid_indexed(input_eax, input_ecx);
        tmp = (u32 *)&c;

        return ((*(tmp + (output_reg % 32))) & (1 << bit));
}
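
/*
 * Minimal usage sketch (editor's illustration): guard a test on a CPUID
 * feature bit before exercising the corresponding instruction, e.g.
 *
 *	if (this_cpu_has(X86_FEATURE_RDTSCP)) {
 *		u32 aux;
 *		(void)rdtscp(&aux);	// rdtscp() is declared later in this header
 *	}
 */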

struct far_pointer32 {
        u32 offset;
        u16 selector;
} __attribute__((packed));

struct descriptor_table_ptr {
        u16 limit;
        ulong base;
} __attribute__((packed));

static inline void clac(void)
{
        asm volatile (".byte 0x0f, 0x01, 0xca" : : : "memory");
}

static inline void stac(void)
{
        asm volatile (".byte 0x0f, 0x01, 0xcb" : : : "memory");
}

static inline u16 read_cs(void)
{
        unsigned val;

        asm volatile ("mov %%cs, %0" : "=mr"(val));
        return val;
}

static inline u16 read_ds(void)
{
        unsigned val;

        asm volatile ("mov %%ds, %0" : "=mr"(val));
        return val;
}

static inline u16 read_es(void)
{
        unsigned val;

        asm volatile ("mov %%es, %0" : "=mr"(val));
        return val;
}

static inline u16 read_ss(void)
{
        unsigned val;

        asm volatile ("mov %%ss, %0" : "=mr"(val));
        return val;
}

static inline u16 read_fs(void)
{
        unsigned val;

        asm volatile ("mov %%fs, %0" : "=mr"(val));
        return val;
}

static inline u16 read_gs(void)
{
        unsigned val;

        asm volatile ("mov %%gs, %0" : "=mr"(val));
        return val;
}

static inline unsigned long read_rflags(void)
{
        unsigned long f;
        asm volatile ("pushf; pop %0\n\t" : "=rm"(f));
        return f;
}

static inline void write_ds(unsigned val)
{
        asm volatile ("mov %0, %%ds" : : "rm"(val) : "memory");
}

static inline void write_es(unsigned val)
{
        asm volatile ("mov %0, %%es" : : "rm"(val) : "memory");
}

static inline void write_ss(unsigned val)
{
        asm volatile ("mov %0, %%ss" : : "rm"(val) : "memory");
}

static inline void write_fs(unsigned val)
{
        asm volatile ("mov %0, %%fs" : : "rm"(val) : "memory");
}

static inline void write_gs(unsigned val)
{
        asm volatile ("mov %0, %%gs" : : "rm"(val) : "memory");
}

static inline void write_rflags(unsigned long f)
{
        asm volatile ("push %0; popf\n\t" : : "rm"(f));
}

static inline void set_iopl(int iopl)
{
        unsigned long flags = read_rflags() & ~X86_EFLAGS_IOPL;
        flags |= iopl * (X86_EFLAGS_IOPL / 3);
        write_rflags(flags);
}
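
/*
 * Editor's note on the arithmetic above: X86_EFLAGS_IOPL is the two-bit
 * field at bits 13:12 (0x3000), so X86_EFLAGS_IOPL / 3 == 0x1000, the
 * weight of the field's low bit.  Multiplying by iopl (0-3) therefore
 * places the requested I/O privilege level directly into the IOPL field.
 */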

static inline u64 rdmsr(u32 index)
{
        u32 a, d;
        asm volatile ("rdmsr" : "=a"(a), "=d"(d) : "c"(index) : "memory");
        return a | ((u64)d << 32);
}

static inline void wrmsr(u32 index, u64 val)
{
        u32 a = val, d = val >> 32;
        asm volatile ("wrmsr" : : "a"(a), "d"(d), "c"(index) : "memory");
}

static inline int rdmsr_checking(u32 index)
{
        asm volatile (ASM_TRY("1f")
                      "rdmsr\n\t"
                      "1:"
                      : : "c"(index) : "memory", "eax", "edx");
        return exception_vector();
}

static inline int wrmsr_checking(u32 index, u64 val)
{
        u32 a = val, d = val >> 32;

        asm volatile (ASM_TRY("1f")
                      "wrmsr\n\t"
                      "1:"
                      : : "a"(a), "d"(d), "c"(index) : "memory");
        return exception_vector();
}
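
/*
 * Minimal usage sketch (editor's illustration): the *_checking variants
 * rely on ASM_TRY()/exception_vector() from desc.h and return the vector
 * of any exception raised by the wrapped instruction, so a test can
 * assert that an access to a bogus MSR faults, e.g.
 *
 *	if (wrmsr_checking(0xdeadbeef, 0) == GP_VECTOR)
 *		... expected #GP on a non-existent MSR index ...
 *
 * The MSR index 0xdeadbeef is purely illustrative.
 */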

static inline uint64_t rdpmc(uint32_t index)
{
        uint32_t a, d;
        asm volatile ("rdpmc" : "=a"(a), "=d"(d) : "c"(index));
        return a | ((uint64_t)d << 32);
}

static inline void write_cr0(ulong val)
{
        asm volatile ("mov %0, %%cr0" : : "r"(val) : "memory");
}

static inline ulong read_cr0(void)
{
        ulong val;
        asm volatile ("mov %%cr0, %0" : "=r"(val) : : "memory");
        return val;
}

static inline void write_cr2(ulong val)
{
        asm volatile ("mov %0, %%cr2" : : "r"(val) : "memory");
}

static inline ulong read_cr2(void)
{
        ulong val;
        asm volatile ("mov %%cr2, %0" : "=r"(val) : : "memory");
        return val;
}

static inline void write_cr3(ulong val)
{
        asm volatile ("mov %0, %%cr3" : : "r"(val) : "memory");
}

static inline ulong read_cr3(void)
{
        ulong val;
        asm volatile ("mov %%cr3, %0" : "=r"(val) : : "memory");
        return val;
}

static inline void update_cr3(void *cr3)
{
        write_cr3((ulong)cr3);
}

static inline void write_cr4(ulong val)
{
        asm volatile ("mov %0, %%cr4" : : "r"(val) : "memory");
}

static inline ulong read_cr4(void)
{
        ulong val;
        asm volatile ("mov %%cr4, %0" : "=r"(val) : : "memory");
        return val;
}

static inline void write_cr8(ulong val)
{
        asm volatile ("mov %0, %%cr8" : : "r"(val) : "memory");
}

static inline ulong read_cr8(void)
{
        ulong val;
        asm volatile ("mov %%cr8, %0" : "=r"(val) : : "memory");
        return val;
}

static inline void lgdt(const struct descriptor_table_ptr *ptr)
{
        asm volatile ("lgdt %0" : : "m"(*ptr));
}

static inline void sgdt(struct descriptor_table_ptr *ptr)
{
        asm volatile ("sgdt %0" : "=m"(*ptr));
}

static inline void lidt(const struct descriptor_table_ptr *ptr)
{
        asm volatile ("lidt %0" : : "m"(*ptr));
}

static inline void sidt(struct descriptor_table_ptr *ptr)
{
        asm volatile ("sidt %0" : "=m"(*ptr));
}

static inline void lldt(u16 val)
{
        asm volatile ("lldt %0" : : "rm"(val));
}

static inline u16 sldt(void)
{
        u16 val;
        asm volatile ("sldt %0" : "=rm"(val));
        return val;
}

static inline void ltr(u16 val)
{
        asm volatile ("ltr %0" : : "rm"(val));
}

static inline u16 str(void)
{
        u16 val;
        asm volatile ("str %0" : "=rm"(val));
        return val;
}

static inline void write_dr0(void *val)
{
        asm volatile ("mov %0, %%dr0" : : "r"(val) : "memory");
}

static inline void write_dr1(void *val)
{
        asm volatile ("mov %0, %%dr1" : : "r"(val) : "memory");
}

static inline void write_dr2(void *val)
{
        asm volatile ("mov %0, %%dr2" : : "r"(val) : "memory");
}

static inline void write_dr3(void *val)
{
        asm volatile ("mov %0, %%dr3" : : "r"(val) : "memory");
}

static inline void write_dr6(ulong val)
{
        asm volatile ("mov %0, %%dr6" : : "r"(val) : "memory");
}

static inline ulong read_dr6(void)
{
        ulong val;
        asm volatile ("mov %%dr6, %0" : "=r"(val));
        return val;
}

static inline void write_dr7(ulong val)
{
        asm volatile ("mov %0, %%dr7" : : "r"(val) : "memory");
}

static inline ulong read_dr7(void)
{
        ulong val;
        asm volatile ("mov %%dr7, %0" : "=r"(val));
        return val;
}

static inline void pause(void)
{
        asm volatile ("pause");
}

static inline void cli(void)
{
        asm volatile ("cli");
}

static inline void sti(void)
{
        asm volatile ("sti");
}

static inline unsigned long long rdrand(void)
{
        long long r;

        asm volatile("rdrand %0\n\t"
                     "jc 1f\n\t"
                     "mov $0, %0\n\t"
                     "1:\n\t" : "=r" (r));
        return r;
}

static inline unsigned long long rdtsc(void)
{
        long long r;

#ifdef __x86_64__
        unsigned a, d;

        asm volatile ("rdtsc" : "=a"(a), "=d"(d));
        r = a | ((long long)d << 32);
#else
        asm volatile ("rdtsc" : "=A"(r));
#endif
        return r;
}

/*
 * Per the advice in the SDM, volume 2, the sequence "mfence; lfence"
 * executed immediately before rdtsc ensures that rdtsc will be
 * executed only after all previous instructions have executed and all
 * previous loads and stores are globally visible. In addition, the
 * lfence immediately after rdtsc ensures that rdtsc will be executed
 * prior to the execution of any subsequent instruction.
 */
static inline unsigned long long fenced_rdtsc(void)
{
        unsigned long long tsc;

#ifdef __x86_64__
        unsigned int eax, edx;

        asm volatile ("mfence; lfence; rdtsc; lfence" : "=a"(eax), "=d"(edx));
        tsc = eax | ((unsigned long long)edx << 32);
#else
        asm volatile ("mfence; lfence; rdtsc; lfence" : "=A"(tsc));
#endif
        return tsc;
}
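
/*
 * Minimal timing sketch (editor's illustration): use the fenced variant
 * when the TSC read must be ordered against the surrounding code, e.g.
 * to bound the cost of a single operation:
 *
 *	unsigned long long t0 = fenced_rdtsc();
 *	do_something_under_test();		// hypothetical helper
 *	unsigned long long cycles = fenced_rdtsc() - t0;
 */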

static inline unsigned long long rdtscp(u32 *aux)
{
        long long r;

#ifdef __x86_64__
        unsigned a, d;

        asm volatile ("rdtscp" : "=a"(a), "=d"(d), "=c"(*aux));
        r = a | ((long long)d << 32);
#else
        asm volatile ("rdtscp" : "=A"(r), "=c"(*aux));
#endif
        return r;
}

static inline void wrtsc(u64 tsc)
{
        wrmsr(MSR_IA32_TSC, tsc);
}

static inline void irq_disable(void)
{
        asm volatile("cli");
}

/* Note that irq_enable() does not ensure an interrupt shadow due
 * to the vagaries of compiler optimizations.  If you need the
 * shadow, use a single asm with "sti" and the instruction after it.
 */
static inline void irq_enable(void)
{
        asm volatile("sti");
}
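
/*
 * Editor's illustration of the comment above: safe_halt() below is the
 * canonical pattern -- "sti" and the instruction that must execute in its
 * one-instruction interrupt shadow are emitted from a single asm
 * statement, e.g.
 *
 *	asm volatile("sti; nop");	// the nop runs before any pending IRQ
 */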

static inline void invlpg(volatile void *va)
{
        asm volatile("invlpg (%0)" ::"r" (va) : "memory");
}

static inline void safe_halt(void)
{
        asm volatile("sti; hlt");
}

static inline u32 read_pkru(void)
{
        unsigned int eax, edx;
        unsigned int ecx = 0;
        unsigned int pkru;

        asm volatile(".byte 0x0f,0x01,0xee\n\t"
                     : "=a" (eax), "=d" (edx)
                     : "c" (ecx));
        pkru = eax;
        return pkru;
}

static inline void write_pkru(u32 pkru)
{
        unsigned int eax = pkru;
        unsigned int ecx = 0;
        unsigned int edx = 0;

        asm volatile(".byte 0x0f,0x01,0xef\n\t"
                     : : "a" (eax), "c" (ecx), "d" (edx));
}

static inline bool is_canonical(u64 addr)
{
        int va_width = (raw_cpuid(0x80000008, 0).a & 0xff00) >> 8;
        int shift_amt = 64 - va_width;

        return (s64)(addr << shift_amt) >> shift_amt == addr;
}

static inline void clear_bit(int bit, u8 *addr)
{
        __asm__ __volatile__("btr %1, %0"
                             : "+m" (*addr) : "Ir" (bit) : "cc", "memory");
}

static inline void set_bit(int bit, u8 *addr)
{
        __asm__ __volatile__("bts %1, %0"
                             : "+m" (*addr) : "Ir" (bit) : "cc", "memory");
}

static inline void flush_tlb(void)
{
        ulong cr4;

        cr4 = read_cr4();
        write_cr4(cr4 ^ X86_CR4_PGE);
        write_cr4(cr4);
}

static inline int has_spec_ctrl(void)
{
        return !!(this_cpu_has(X86_FEATURE_SPEC_CTRL));
}

static inline int cpu_has_efer_nx(void)
{
        return !!(this_cpu_has(X86_FEATURE_NX));
}

static inline bool cpuid_osxsave(void)
{
        return cpuid(1).c & (1 << (X86_FEATURE_OSXSAVE % 32));
}

#endif