xref: /kvm-unit-tests/lib/x86/processor.h (revision 85c21181f45ca45e3dc7a7ba6551c44f73c70cd2)
1c865f654SCornelia Huck #ifndef _X86_PROCESSOR_H_
2c865f654SCornelia Huck #define _X86_PROCESSOR_H_
37d36db35SAvi Kivity 
47d36db35SAvi Kivity #include "libcflat.h"
5142ff635SSean Christopherson #include "desc.h"
6867f820dSPaolo Bonzini #include "msr.h"
7e2f3fe1dSSean Christopherson #include <bitops.h>
82b2d7aadSAvi Kivity #include <stdint.h>
97d36db35SAvi Kivity 
1088f0bb17SSean Christopherson #define NONCANONICAL	0xaaaaaaaaaaaaaaaaull
1188f0bb17SSean Christopherson 
12a3d1fb55SPaolo Bonzini #ifdef __x86_64__
13a3d1fb55SPaolo Bonzini #  define R "r"
14a3d1fb55SPaolo Bonzini #  define W "q"
15a3d1fb55SPaolo Bonzini #  define S "8"
16a3d1fb55SPaolo Bonzini #else
17a3d1fb55SPaolo Bonzini #  define R "e"
18a3d1fb55SPaolo Bonzini #  define W "l"
19a3d1fb55SPaolo Bonzini #  define S "4"
20a3d1fb55SPaolo Bonzini #endif
21a3d1fb55SPaolo Bonzini 
222b934609SXiaoyao Li #define DB_VECTOR 1
232b934609SXiaoyao Li #define BP_VECTOR 3
242b934609SXiaoyao Li #define UD_VECTOR 6
258d2cdb35SMarc Orr #define DF_VECTOR 8
268d2cdb35SMarc Orr #define TS_VECTOR 10
278d2cdb35SMarc Orr #define NP_VECTOR 11
288d2cdb35SMarc Orr #define SS_VECTOR 12
298d2cdb35SMarc Orr #define GP_VECTOR 13
308d2cdb35SMarc Orr #define PF_VECTOR 14
318d2cdb35SMarc Orr #define AC_VECTOR 17
32c986dbe8SNadav Amit #define CP_VECTOR 21
338d2cdb35SMarc Orr 
34a106b30dSPaolo Bonzini #define X86_CR0_PE_BIT		(0)
35a106b30dSPaolo Bonzini #define X86_CR0_PE		BIT(X86_CR0_PE_BIT)
36a106b30dSPaolo Bonzini #define X86_CR0_MP_BIT		(1)
37a106b30dSPaolo Bonzini #define X86_CR0_MP		BIT(X86_CR0_MP_BIT)
38a106b30dSPaolo Bonzini #define X86_CR0_EM_BIT		(2)
39a106b30dSPaolo Bonzini #define X86_CR0_EM		BIT(X86_CR0_EM_BIT)
40a106b30dSPaolo Bonzini #define X86_CR0_TS_BIT		(3)
41a106b30dSPaolo Bonzini #define X86_CR0_TS		BIT(X86_CR0_TS_BIT)
42a106b30dSPaolo Bonzini #define X86_CR0_ET_BIT		(4)
43a106b30dSPaolo Bonzini #define X86_CR0_ET		BIT(X86_CR0_ET_BIT)
44a106b30dSPaolo Bonzini #define X86_CR0_NE_BIT		(5)
45a106b30dSPaolo Bonzini #define X86_CR0_NE		BIT(X86_CR0_NE_BIT)
46a106b30dSPaolo Bonzini #define X86_CR0_WP_BIT		(16)
47a106b30dSPaolo Bonzini #define X86_CR0_WP		BIT(X86_CR0_WP_BIT)
48a106b30dSPaolo Bonzini #define X86_CR0_AM_BIT		(18)
49a106b30dSPaolo Bonzini #define X86_CR0_AM		BIT(X86_CR0_AM_BIT)
50a106b30dSPaolo Bonzini #define X86_CR0_NW_BIT		(29)
51a106b30dSPaolo Bonzini #define X86_CR0_NW		BIT(X86_CR0_NW_BIT)
52a106b30dSPaolo Bonzini #define X86_CR0_CD_BIT		(30)
53a106b30dSPaolo Bonzini #define X86_CR0_CD		BIT(X86_CR0_CD_BIT)
54a106b30dSPaolo Bonzini #define X86_CR0_PG_BIT		(31)
55a106b30dSPaolo Bonzini #define X86_CR0_PG		BIT(X86_CR0_PG_BIT)
56b6a0ff03SPaolo Bonzini 
57e2f3fe1dSSean Christopherson #define X86_CR3_PCID_MASK	GENMASK(11, 0)
58e2f3fe1dSSean Christopherson 
59a106b30dSPaolo Bonzini #define X86_CR4_VME_BIT		(0)
60a106b30dSPaolo Bonzini #define X86_CR4_VME		BIT(X86_CR4_VME_BIT)
61a106b30dSPaolo Bonzini #define X86_CR4_PVI_BIT		(1)
62a106b30dSPaolo Bonzini #define X86_CR4_PVI		BIT(X86_CR4_PVI_BIT)
63a106b30dSPaolo Bonzini #define X86_CR4_TSD_BIT		(2)
64a106b30dSPaolo Bonzini #define X86_CR4_TSD		BIT(X86_CR4_TSD_BIT)
65a106b30dSPaolo Bonzini #define X86_CR4_DE_BIT		(3)
66a106b30dSPaolo Bonzini #define X86_CR4_DE		BIT(X86_CR4_DE_BIT)
67a106b30dSPaolo Bonzini #define X86_CR4_PSE_BIT		(4)
68a106b30dSPaolo Bonzini #define X86_CR4_PSE		BIT(X86_CR4_PSE_BIT)
69a106b30dSPaolo Bonzini #define X86_CR4_PAE_BIT		(5)
70a106b30dSPaolo Bonzini #define X86_CR4_PAE		BIT(X86_CR4_PAE_BIT)
71a106b30dSPaolo Bonzini #define X86_CR4_MCE_BIT		(6)
72a106b30dSPaolo Bonzini #define X86_CR4_MCE		BIT(X86_CR4_MCE_BIT)
73a106b30dSPaolo Bonzini #define X86_CR4_PGE_BIT		(7)
74a106b30dSPaolo Bonzini #define X86_CR4_PGE		BIT(X86_CR4_PGE_BIT)
75a106b30dSPaolo Bonzini #define X86_CR4_PCE_BIT		(8)
76a106b30dSPaolo Bonzini #define X86_CR4_PCE		BIT(X86_CR4_PCE_BIT)
77a106b30dSPaolo Bonzini #define X86_CR4_OSFXSR_BIT	(9)
78a106b30dSPaolo Bonzini #define X86_CR4_OSFXSR		BIT(X86_CR4_OSFXSR_BIT)
79a106b30dSPaolo Bonzini #define X86_CR4_OSXMMEXCPT_BIT	(10)
80a106b30dSPaolo Bonzini #define X86_CR4_OSXMMEXCPT	BIT(X86_CR4_OSXMMEXCPT_BIT)
81a106b30dSPaolo Bonzini #define X86_CR4_UMIP_BIT	(11)
82a106b30dSPaolo Bonzini #define X86_CR4_UMIP		BIT(X86_CR4_UMIP_BIT)
83a106b30dSPaolo Bonzini #define X86_CR4_LA57_BIT	(12)
84a106b30dSPaolo Bonzini #define X86_CR4_LA57		BIT(X86_CR4_LA57_BIT)
85a106b30dSPaolo Bonzini #define X86_CR4_VMXE_BIT	(13)
86a106b30dSPaolo Bonzini #define X86_CR4_VMXE		BIT(X86_CR4_VMXE_BIT)
87a106b30dSPaolo Bonzini #define X86_CR4_SMXE_BIT	(14)
88a106b30dSPaolo Bonzini #define X86_CR4_SMXE		BIT(X86_CR4_SMXE_BIT)
89a106b30dSPaolo Bonzini /* UNUSED			(15) */
90a106b30dSPaolo Bonzini #define X86_CR4_FSGSBASE_BIT	(16)
91a106b30dSPaolo Bonzini #define X86_CR4_FSGSBASE	BIT(X86_CR4_FSGSBASE_BIT)
92a106b30dSPaolo Bonzini #define X86_CR4_PCIDE_BIT	(17)
93a106b30dSPaolo Bonzini #define X86_CR4_PCIDE		BIT(X86_CR4_PCIDE_BIT)
94a106b30dSPaolo Bonzini #define X86_CR4_OSXSAVE_BIT	(18)
95a106b30dSPaolo Bonzini #define X86_CR4_OSXSAVE		BIT(X86_CR4_OSXSAVE_BIT)
96a106b30dSPaolo Bonzini #define X86_CR4_KL_BIT		(19)
97a106b30dSPaolo Bonzini #define X86_CR4_KL		BIT(X86_CR4_KL_BIT)
98a106b30dSPaolo Bonzini #define X86_CR4_SMEP_BIT	(20)
99a106b30dSPaolo Bonzini #define X86_CR4_SMEP		BIT(X86_CR4_SMEP_BIT)
100a106b30dSPaolo Bonzini #define X86_CR4_SMAP_BIT	(21)
101a106b30dSPaolo Bonzini #define X86_CR4_SMAP		BIT(X86_CR4_SMAP_BIT)
102a106b30dSPaolo Bonzini #define X86_CR4_PKE_BIT		(22)
103a106b30dSPaolo Bonzini #define X86_CR4_PKE		BIT(X86_CR4_PKE_BIT)
104a106b30dSPaolo Bonzini #define X86_CR4_CET_BIT		(23)
105a106b30dSPaolo Bonzini #define X86_CR4_CET		BIT(X86_CR4_CET_BIT)
106a106b30dSPaolo Bonzini #define X86_CR4_PKS_BIT		(24)
107a106b30dSPaolo Bonzini #define X86_CR4_PKS		BIT(X86_CR4_PKS_BIT)
108e2f3fe1dSSean Christopherson 
109a106b30dSPaolo Bonzini #define X86_EFLAGS_CF_BIT	(0)
110a106b30dSPaolo Bonzini #define X86_EFLAGS_CF		BIT(X86_EFLAGS_CF_BIT)
111a106b30dSPaolo Bonzini #define X86_EFLAGS_FIXED_BIT	(1)
112a106b30dSPaolo Bonzini #define X86_EFLAGS_FIXED	BIT(X86_EFLAGS_FIXED_BIT)
113a106b30dSPaolo Bonzini #define X86_EFLAGS_PF_BIT	(2)
114a106b30dSPaolo Bonzini #define X86_EFLAGS_PF		BIT(X86_EFLAGS_PF_BIT)
115a106b30dSPaolo Bonzini /* RESERVED 0			(3) */
116a106b30dSPaolo Bonzini #define X86_EFLAGS_AF_BIT	(4)
117a106b30dSPaolo Bonzini #define X86_EFLAGS_AF		BIT(X86_EFLAGS_AF_BIT)
118a106b30dSPaolo Bonzini /* RESERVED 0			(5) */
119a106b30dSPaolo Bonzini #define X86_EFLAGS_ZF_BIT	(6)
120a106b30dSPaolo Bonzini #define X86_EFLAGS_ZF		BIT(X86_EFLAGS_ZF_BIT)
121a106b30dSPaolo Bonzini #define X86_EFLAGS_SF_BIT	(7)
122a106b30dSPaolo Bonzini #define X86_EFLAGS_SF		BIT(X86_EFLAGS_SF_BIT)
123a106b30dSPaolo Bonzini #define X86_EFLAGS_TF_BIT	(8)
124a106b30dSPaolo Bonzini #define X86_EFLAGS_TF		BIT(X86_EFLAGS_TF_BIT)
125a106b30dSPaolo Bonzini #define X86_EFLAGS_IF_BIT	(9)
126a106b30dSPaolo Bonzini #define X86_EFLAGS_IF		BIT(X86_EFLAGS_IF_BIT)
127a106b30dSPaolo Bonzini #define X86_EFLAGS_DF_BIT	(10)
128a106b30dSPaolo Bonzini #define X86_EFLAGS_DF		BIT(X86_EFLAGS_DF_BIT)
129a106b30dSPaolo Bonzini #define X86_EFLAGS_OF_BIT	(11)
130a106b30dSPaolo Bonzini #define X86_EFLAGS_OF		BIT(X86_EFLAGS_OF_BIT)
131e2f3fe1dSSean Christopherson #define X86_EFLAGS_IOPL		GENMASK(13, 12)
132a106b30dSPaolo Bonzini #define X86_EFLAGS_NT_BIT	(14)
133a106b30dSPaolo Bonzini #define X86_EFLAGS_NT		BIT(X86_EFLAGS_NT_BIT)
134a106b30dSPaolo Bonzini /* RESERVED 0			(15) */
135a106b30dSPaolo Bonzini #define X86_EFLAGS_RF_BIT	(16)
136a106b30dSPaolo Bonzini #define X86_EFLAGS_RF		BIT(X86_EFLAGS_RF_BIT)
137a106b30dSPaolo Bonzini #define X86_EFLAGS_VM_BIT	(17)
138a106b30dSPaolo Bonzini #define X86_EFLAGS_VM		BIT(X86_EFLAGS_VM_BIT)
139a106b30dSPaolo Bonzini #define X86_EFLAGS_AC_BIT	(18)
140a106b30dSPaolo Bonzini #define X86_EFLAGS_AC		BIT(X86_EFLAGS_AC_BIT)
141a106b30dSPaolo Bonzini #define X86_EFLAGS_VIF_BIT	(19)
142a106b30dSPaolo Bonzini #define X86_EFLAGS_VIF		BIT(X86_EFLAGS_VIF_BIT)
143a106b30dSPaolo Bonzini #define X86_EFLAGS_VIP_BIT	(20)
144a106b30dSPaolo Bonzini #define X86_EFLAGS_VIP		BIT(X86_EFLAGS_VIP_BIT)
145a106b30dSPaolo Bonzini #define X86_EFLAGS_ID_BIT	(21)
146a106b30dSPaolo Bonzini #define X86_EFLAGS_ID		BIT(X86_EFLAGS_ID_BIT)
1477488d290SPaolo Bonzini 
14860d8090cSLiran Alon #define X86_EFLAGS_ALU (X86_EFLAGS_CF | X86_EFLAGS_PF | X86_EFLAGS_AF | \
14960d8090cSLiran Alon 			X86_EFLAGS_ZF | X86_EFLAGS_SF | X86_EFLAGS_OF)
15060d8090cSLiran Alon 
151a3d1fb55SPaolo Bonzini 
1526ddcc298SKrish Sadhukhan /*
1536ddcc298SKrish Sadhukhan  * CPU features
1546ddcc298SKrish Sadhukhan  */
1556ddcc298SKrish Sadhukhan 
/*
 * CPUID output register selector, as encoded into bits [15:8] of the
 * X86_FEATURE_* meta-data (see CPUID() below).
 */
enum cpuid_output_regs {
	EAX,
	EBX,
	ECX,
	EDX
};
1626ddcc298SKrish Sadhukhan 
1636ddcc298SKrish Sadhukhan struct cpuid { u32 a, b, c, d; };
1646ddcc298SKrish Sadhukhan 
/*
 * Execute CPUID with @function in EAX and @index in ECX, without any
 * range checking of @function; returns all four output registers.
 */
static inline struct cpuid raw_cpuid(u32 function, u32 index)
{
	struct cpuid r;
	asm volatile ("cpuid"
		      : "=a"(r.a), "=b"(r.b), "=c"(r.c), "=d"(r.d)
		      : "0"(function), "2"(index));
	return r;
}
1736ddcc298SKrish Sadhukhan 
1746ddcc298SKrish Sadhukhan static inline struct cpuid cpuid_indexed(u32 function, u32 index)
1756ddcc298SKrish Sadhukhan {
1766ddcc298SKrish Sadhukhan 	u32 level = raw_cpuid(function & 0xf0000000, 0).a;
1776ddcc298SKrish Sadhukhan 	if (level < function)
1786ddcc298SKrish Sadhukhan 	return (struct cpuid) { 0, 0, 0, 0 };
1796ddcc298SKrish Sadhukhan 	return raw_cpuid(function, index);
1806ddcc298SKrish Sadhukhan }
1816ddcc298SKrish Sadhukhan 
/* Range-checked CPUID of leaf @function with ECX index 0. */
static inline struct cpuid cpuid(u32 function)
{
	return cpuid_indexed(function, 0);
}
1866ddcc298SKrish Sadhukhan 
1876ddcc298SKrish Sadhukhan static inline u8 cpuid_maxphyaddr(void)
1886ddcc298SKrish Sadhukhan {
1896ddcc298SKrish Sadhukhan 	if (raw_cpuid(0x80000000, 0).a < 0x80000008)
1906ddcc298SKrish Sadhukhan 	return 36;
1916ddcc298SKrish Sadhukhan 	return raw_cpuid(0x80000008, 0).a & 0xff;
1926ddcc298SKrish Sadhukhan }
1936ddcc298SKrish Sadhukhan 
19422abdd39SNadav Amit static inline bool is_intel(void)
19522abdd39SNadav Amit {
19622abdd39SNadav Amit 	struct cpuid c = cpuid(0);
19722abdd39SNadav Amit 	u32 name[4] = {c.b, c.d, c.c };
19822abdd39SNadav Amit 
19922abdd39SNadav Amit 	return strcmp((char *)name, "GenuineIntel") == 0;
20022abdd39SNadav Amit }
20122abdd39SNadav Amit 
2026ddcc298SKrish Sadhukhan #define	CPUID(a, b, c, d) ((((unsigned long long) a) << 32) | (b << 16) | \
2036ddcc298SKrish Sadhukhan 			  (c << 8) | d)
2046ddcc298SKrish Sadhukhan 
2056ddcc298SKrish Sadhukhan /*
2066ddcc298SKrish Sadhukhan  * Each X86_FEATURE_XXX definition is 64-bit and contains the following
2076ddcc298SKrish Sadhukhan  * CPUID meta-data:
2086ddcc298SKrish Sadhukhan  *
2096ddcc298SKrish Sadhukhan  * 	[63:32] :  input value for EAX
2106ddcc298SKrish Sadhukhan  * 	[31:16] :  input value for ECX
2116ddcc298SKrish Sadhukhan  * 	[15:8]  :  output register
2126ddcc298SKrish Sadhukhan  * 	[7:0]   :  bit position in output register
2136ddcc298SKrish Sadhukhan  */
2146ddcc298SKrish Sadhukhan 
2156ddcc298SKrish Sadhukhan /*
216b52bf046SSean Christopherson  * Basic Leafs, a.k.a. Intel defined
2176ddcc298SKrish Sadhukhan  */
2186ddcc298SKrish Sadhukhan #define	X86_FEATURE_MWAIT		(CPUID(0x1, 0, ECX, 3))
2196ddcc298SKrish Sadhukhan #define	X86_FEATURE_VMX			(CPUID(0x1, 0, ECX, 5))
2209d9000a5SYang Weijiang #define	X86_FEATURE_PDCM		(CPUID(0x1, 0, ECX, 15))
2216ddcc298SKrish Sadhukhan #define	X86_FEATURE_PCID		(CPUID(0x1, 0, ECX, 17))
222baf248c5SSean Christopherson #define X86_FEATURE_X2APIC		(CPUID(0x1, 0, ECX, 21))
2236ddcc298SKrish Sadhukhan #define	X86_FEATURE_MOVBE		(CPUID(0x1, 0, ECX, 22))
2246ddcc298SKrish Sadhukhan #define	X86_FEATURE_TSC_DEADLINE_TIMER	(CPUID(0x1, 0, ECX, 24))
2256ddcc298SKrish Sadhukhan #define	X86_FEATURE_XSAVE		(CPUID(0x1, 0, ECX, 26))
2266ddcc298SKrish Sadhukhan #define	X86_FEATURE_OSXSAVE		(CPUID(0x1, 0, ECX, 27))
2276ddcc298SKrish Sadhukhan #define	X86_FEATURE_RDRAND		(CPUID(0x1, 0, ECX, 30))
2286ddcc298SKrish Sadhukhan #define	X86_FEATURE_MCE			(CPUID(0x1, 0, EDX, 7))
2296ddcc298SKrish Sadhukhan #define	X86_FEATURE_APIC		(CPUID(0x1, 0, EDX, 9))
2306ddcc298SKrish Sadhukhan #define	X86_FEATURE_CLFLUSH		(CPUID(0x1, 0, EDX, 19))
2316ddcc298SKrish Sadhukhan #define	X86_FEATURE_XMM			(CPUID(0x1, 0, EDX, 25))
2326ddcc298SKrish Sadhukhan #define	X86_FEATURE_XMM2		(CPUID(0x1, 0, EDX, 26))
2336ddcc298SKrish Sadhukhan #define	X86_FEATURE_TSC_ADJUST		(CPUID(0x7, 0, EBX, 1))
2346163f75dSPaolo Bonzini #define	X86_FEATURE_HLE			(CPUID(0x7, 0, EBX, 4))
23597b5f955SSean Christopherson #define	X86_FEATURE_SMEP		(CPUID(0x7, 0, EBX, 7))
2366ddcc298SKrish Sadhukhan #define	X86_FEATURE_INVPCID		(CPUID(0x7, 0, EBX, 10))
2376ddcc298SKrish Sadhukhan #define	X86_FEATURE_RTM			(CPUID(0x7, 0, EBX, 11))
2386ddcc298SKrish Sadhukhan #define	X86_FEATURE_SMAP		(CPUID(0x7, 0, EBX, 20))
2396ddcc298SKrish Sadhukhan #define	X86_FEATURE_PCOMMIT		(CPUID(0x7, 0, EBX, 22))
2406ddcc298SKrish Sadhukhan #define	X86_FEATURE_CLFLUSHOPT		(CPUID(0x7, 0, EBX, 23))
2416ddcc298SKrish Sadhukhan #define	X86_FEATURE_CLWB		(CPUID(0x7, 0, EBX, 24))
2426ddcc298SKrish Sadhukhan #define	X86_FEATURE_UMIP		(CPUID(0x7, 0, ECX, 2))
2436ddcc298SKrish Sadhukhan #define	X86_FEATURE_PKU			(CPUID(0x7, 0, ECX, 3))
2446ddcc298SKrish Sadhukhan #define	X86_FEATURE_LA57		(CPUID(0x7, 0, ECX, 16))
2456ddcc298SKrish Sadhukhan #define	X86_FEATURE_RDPID		(CPUID(0x7, 0, ECX, 22))
24679e53994SYang Weijiang #define	X86_FEATURE_SHSTK		(CPUID(0x7, 0, ECX, 7))
24779e53994SYang Weijiang #define	X86_FEATURE_IBT			(CPUID(0x7, 0, EDX, 20))
2486ddcc298SKrish Sadhukhan #define	X86_FEATURE_SPEC_CTRL		(CPUID(0x7, 0, EDX, 26))
2496163f75dSPaolo Bonzini #define	X86_FEATURE_ARCH_CAPABILITIES	(CPUID(0x7, 0, EDX, 29))
250fdae6092SChenyi Qiang #define	X86_FEATURE_PKS			(CPUID(0x7, 0, ECX, 31))
2516ddcc298SKrish Sadhukhan 
2526ddcc298SKrish Sadhukhan /*
253b52bf046SSean Christopherson  * Extended Leafs, a.k.a. AMD defined
2546ddcc298SKrish Sadhukhan  */
2556ddcc298SKrish Sadhukhan #define	X86_FEATURE_SVM			(CPUID(0x80000001, 0, ECX, 2))
256b52bf046SSean Christopherson #define	X86_FEATURE_NX			(CPUID(0x80000001, 0, EDX, 20))
257b52bf046SSean Christopherson #define	X86_FEATURE_GBPAGES		(CPUID(0x80000001, 0, EDX, 26))
2586ddcc298SKrish Sadhukhan #define	X86_FEATURE_RDTSCP		(CPUID(0x80000001, 0, EDX, 27))
259b52bf046SSean Christopherson #define	X86_FEATURE_LM			(CPUID(0x80000001, 0, EDX, 29))
260b52bf046SSean Christopherson #define	X86_FEATURE_RDPRU		(CPUID(0x80000008, 0, EBX, 4))
2616ddcc298SKrish Sadhukhan #define	X86_FEATURE_AMD_IBPB		(CPUID(0x80000008, 0, EBX, 12))
2626ddcc298SKrish Sadhukhan #define	X86_FEATURE_NPT			(CPUID(0x8000000A, 0, EDX, 0))
263537d39dfSMaxim Levitsky #define	X86_FEATURE_LBRV		(CPUID(0x8000000A, 0, EDX, 1))
2646ddcc298SKrish Sadhukhan #define	X86_FEATURE_NRIPS		(CPUID(0x8000000A, 0, EDX, 3))
265a8503d50SMaxim Levitsky #define X86_FEATURE_TSCRATEMSR		(CPUID(0x8000000A, 0, EDX, 4))
2668650dffeSMaxim Levitsky #define X86_FEATURE_PAUSEFILTER		(CPUID(0x8000000A, 0, EDX, 10))
2678650dffeSMaxim Levitsky #define X86_FEATURE_PFTHRESHOLD		(CPUID(0x8000000A, 0, EDX, 12))
268f6972bd6SLara Lazier #define	X86_FEATURE_VGIF		(CPUID(0x8000000A, 0, EDX, 16))
2696ddcc298SKrish Sadhukhan 
2706ddcc298SKrish Sadhukhan 
2716ddcc298SKrish Sadhukhan static inline bool this_cpu_has(u64 feature)
2726ddcc298SKrish Sadhukhan {
2736ddcc298SKrish Sadhukhan 	u32 input_eax = feature >> 32;
2746ddcc298SKrish Sadhukhan 	u32 input_ecx = (feature >> 16) & 0xffff;
2756ddcc298SKrish Sadhukhan 	u32 output_reg = (feature >> 8) & 0xff;
2766ddcc298SKrish Sadhukhan 	u8 bit = feature & 0xff;
2776ddcc298SKrish Sadhukhan 	struct cpuid c;
2786ddcc298SKrish Sadhukhan 	u32 *tmp;
2796ddcc298SKrish Sadhukhan 
2806ddcc298SKrish Sadhukhan 	c = cpuid_indexed(input_eax, input_ecx);
2816ddcc298SKrish Sadhukhan 	tmp = (u32 *)&c;
2826ddcc298SKrish Sadhukhan 
2836ddcc298SKrish Sadhukhan 	return ((*(tmp + (output_reg % 32))) & (1 << bit));
2846ddcc298SKrish Sadhukhan }
2856ddcc298SKrish Sadhukhan 
/* 32-bit far pointer (offset:selector), e.g. for far JMP/CALL operands. */
struct far_pointer32 {
	u32 offset;
	u16 selector;
} __attribute__((packed));
2901bde9127SJim Mattson 
/* Pseudo-descriptor operand for LGDT/SGDT/LIDT/SIDT. */
struct descriptor_table_ptr {
	u16 limit;
	ulong base;
} __attribute__((packed));
2957d36db35SAvi Kivity 
/*
 * CLAC/STAC clear/set EFLAGS.AC.  Emitted as raw bytes so the file
 * assembles even with toolchains that don't know the SMAP mnemonics.
 */
static inline void clac(void)
{
	/* 0f 01 ca = clac */
	asm volatile (".byte 0x0f, 0x01, 0xca" : : : "memory");
}

static inline void stac(void)
{
	/* 0f 01 cb = stac */
	asm volatile (".byte 0x0f, 0x01, 0xcb" : : : "memory");
}
305fa6318d1SPaolo Bonzini 
/*
 * Segment selector readers.  The selector is moved into a 32-bit GPR
 * and truncated to the low 16 bits on return.
 */
static inline u16 read_cs(void)
{
	unsigned val;

	asm volatile ("mov %%cs, %0" : "=mr"(val));
	return val;
}

static inline u16 read_ds(void)
{
	unsigned val;

	asm volatile ("mov %%ds, %0" : "=mr"(val));
	return val;
}

static inline u16 read_es(void)
{
	unsigned val;

	asm volatile ("mov %%es, %0" : "=mr"(val));
	return val;
}

static inline u16 read_ss(void)
{
	unsigned val;

	asm volatile ("mov %%ss, %0" : "=mr"(val));
	return val;
}

static inline u16 read_fs(void)
{
	unsigned val;

	asm volatile ("mov %%fs, %0" : "=mr"(val));
	return val;
}

static inline u16 read_gs(void)
{
	unsigned val;

	asm volatile ("mov %%gs, %0" : "=mr"(val));
	return val;
}
3537d36db35SAvi Kivity 
/* Read RFLAGS (EFLAGS on 32-bit) via PUSHF/POP. */
static inline unsigned long read_rflags(void)
{
	unsigned long f;
	asm volatile ("pushf; pop %0\n\t" : "=rm"(f));
	return f;
}
36077e03b63SGleb Natapov 
/*
 * Segment selector writers.  The "memory" clobber keeps the compiler
 * from moving memory accesses across the segment reload.
 */
static inline void write_ds(unsigned val)
{
	asm volatile ("mov %0, %%ds" : : "rm"(val) : "memory");
}

static inline void write_es(unsigned val)
{
	asm volatile ("mov %0, %%es" : : "rm"(val) : "memory");
}

static inline void write_ss(unsigned val)
{
	asm volatile ("mov %0, %%ss" : : "rm"(val) : "memory");
}

static inline void write_fs(unsigned val)
{
	asm volatile ("mov %0, %%fs" : : "rm"(val) : "memory");
}

static inline void write_gs(unsigned val)
{
	asm volatile ("mov %0, %%gs" : : "rm"(val) : "memory");
}

/* Load RFLAGS wholesale via PUSH/POPF. */
static inline void write_rflags(unsigned long f)
{
	asm volatile ("push %0; popf\n\t" : : "rm"(f));
}
3907488d290SPaolo Bonzini 
/*
 * Set EFLAGS.IOPL (bits 13:12) to @iopl, leaving all other flags
 * untouched.  X86_EFLAGS_IOPL / 3 is the weight of the field's low
 * bit (BIT(12)), so multiplying by @iopl (0-3) fills the 2-bit field.
 */
static inline void set_iopl(int iopl)
{
	unsigned long flags = read_rflags() & ~X86_EFLAGS_IOPL;
	flags |= iopl * (X86_EFLAGS_IOPL / 3);
	write_rflags(flags);
}
397728e71eeSPaolo Bonzini 
/*
 * Don't use the safe variants for rdmsr() or wrmsr().  The exception fixup
 * infrastructure uses per-CPU data and thus consumes GS.base.  Various tests
 * temporarily modify MSR_GS_BASE and will explode when trying to determine
 * whether or not RDMSR/WRMSR faulted.
 */
static inline u64 rdmsr(u32 index)
{
	u32 a, d;
	/* RDMSR returns the 64-bit MSR value split across EDX:EAX. */
	asm volatile ("rdmsr" : "=a"(a), "=d"(d) : "c"(index) : "memory");
	return a | ((u64)d << 32);
}

static inline void wrmsr(u32 index, u64 val)
{
	u32 a = val, d = val >> 32;
	asm volatile ("wrmsr" : : "a"(a), "d"(d), "c"(index) : "memory");
}
4167d36db35SAvi Kivity 
/*
 * Read MSR @index into @val, catching any resulting exception.
 * Returns the exception vector, 0 if the access succeeded.  Note that
 * *val is written unconditionally, i.e. holds garbage on failure.
 */
static inline int rdmsr_safe(u32 index, uint64_t *val)
{
	uint32_t a, d;

	asm volatile (ASM_TRY("1f")
		      "rdmsr\n\t"
		      "1:"
		      : "=a"(a), "=d"(d)
		      : "c"(index) : "memory");

	*val = (uint64_t)a | ((uint64_t)d << 32);
	return exception_vector();
}

/*
 * Write @val to MSR @index, catching any resulting exception.
 * Returns the exception vector, 0 if the access succeeded.
 */
static inline int wrmsr_safe(u32 index, u64 val)
{
	u32 a = val, d = val >> 32;

	asm volatile (ASM_TRY("1f")
		      "wrmsr\n\t"
		      "1:"
		      : : "a"(a), "d"(d), "c"(index) : "memory");
	return exception_vector();
}
441142ff635SSean Christopherson 
/*
 * Read performance counter @index into @val, catching any resulting
 * exception.  Returns the exception vector, 0 if the read succeeded.
 */
static inline int rdpmc_safe(u32 index, uint64_t *val)
{
	uint32_t a, d;

	asm volatile (ASM_TRY("1f")
		      "rdpmc\n\t"
		      "1:"
		      : "=a"(a), "=d"(d) : "c"(index) : "memory");
	*val = (uint64_t)a | ((uint64_t)d << 32);
	return exception_vector();
}

/* Read performance counter @index, asserting that RDPMC doesn't fault. */
static inline uint64_t rdpmc(uint32_t index)
{
	uint64_t val;
	int vector = rdpmc_safe(index, &val);

	assert_msg(!vector, "Unexpected %s on RDPMC(%d)",
		   exception_mnemonic(vector), index);
	return val;
}
4632b2d7aadSAvi Kivity 
/*
 * Write CR0, catching any resulting exception.  Returns the exception
 * vector, 0 if the write succeeded.
 */
static inline int write_cr0_safe(ulong val)
{
	asm volatile(ASM_TRY("1f")
		     "mov %0,%%cr0\n\t"
		     "1:": : "r" (val));
	return exception_vector();
}

/* Write CR0, asserting that the write succeeds. */
static inline void write_cr0(ulong val)
{
	int vector = write_cr0_safe(val);

	assert_msg(!vector, "Unexpected fault '%d' writing CR0 = %lx",
		   vector, val);
}

static inline ulong read_cr0(void)
{
	ulong val;
	asm volatile ("mov %%cr0, %0" : "=r"(val) : : "memory");
	return val;
}
4867d36db35SAvi Kivity 
/* CR2 holds the faulting linear address of the most recent #PF. */
static inline void write_cr2(ulong val)
{
	asm volatile ("mov %0, %%cr2" : : "r"(val) : "memory");
}

static inline ulong read_cr2(void)
{
	ulong val;
	asm volatile ("mov %%cr2, %0" : "=r"(val) : : "memory");
	return val;
}
4987d36db35SAvi Kivity 
/*
 * Write CR3 (page table root), catching any resulting exception.
 * Returns the exception vector, 0 if the write succeeded.
 */
static inline int write_cr3_safe(ulong val)
{
	asm volatile(ASM_TRY("1f")
		     "mov %0,%%cr3\n\t"
		     "1:": : "r" (val));
	return exception_vector();
}

/* Write CR3, asserting that the write succeeds. */
static inline void write_cr3(ulong val)
{
	int vector = write_cr3_safe(val);

	assert_msg(!vector, "Unexpected fault '%d' writing CR3 = %lx",
		   vector, val);
}

static inline ulong read_cr3(void)
{
	ulong val;
	asm volatile ("mov %%cr3, %0" : "=r"(val) : : "memory");
	return val;
}

/* Pointer-typed convenience wrapper around write_cr3(). */
static inline void update_cr3(void *cr3)
{
	write_cr3((ulong)cr3);
}
5261c320e18SYadong Qi 
/*
 * Write CR4, catching any resulting exception.  Returns the exception
 * vector, 0 if the write succeeded.
 */
static inline int write_cr4_safe(ulong val)
{
	asm volatile(ASM_TRY("1f")
		     "mov %0,%%cr4\n\t"
		     "1:": : "r" (val));
	return exception_vector();
}

/* Write CR4, asserting that the write succeeds. */
static inline void write_cr4(ulong val)
{
	int vector = write_cr4_safe(val);

	assert_msg(!vector, "Unexpected fault '%d' writing CR4 = %lx",
		   vector, val);
}

static inline ulong read_cr4(void)
{
	ulong val;
	asm volatile ("mov %%cr4, %0" : "=r"(val) : : "memory");
	return val;
}
5497d36db35SAvi Kivity 
/* CR8 is the Task Priority Register (TPR). */
static inline void write_cr8(ulong val)
{
	asm volatile ("mov %0, %%cr8" : : "r"(val) : "memory");
}

static inline ulong read_cr8(void)
{
	ulong val;
	asm volatile ("mov %%cr8, %0" : "=r"(val) : : "memory");
	return val;
}
5617d36db35SAvi Kivity 
/*
 * Descriptor table and task register accessors.  The load/store
 * instructions take a pseudo-descriptor (limit + base) in memory for
 * GDT/IDT, and a 16-bit selector for LDT/TR.
 */
static inline void lgdt(const struct descriptor_table_ptr *ptr)
{
	asm volatile ("lgdt %0" : : "m"(*ptr));
}

static inline void sgdt(struct descriptor_table_ptr *ptr)
{
	asm volatile ("sgdt %0" : "=m"(*ptr));
}

static inline void lidt(const struct descriptor_table_ptr *ptr)
{
	asm volatile ("lidt %0" : : "m"(*ptr));
}

static inline void sidt(struct descriptor_table_ptr *ptr)
{
	asm volatile ("sidt %0" : "=m"(*ptr));
}

static inline void lldt(u16 val)
{
	asm volatile ("lldt %0" : : "rm"(val));
}

static inline u16 sldt(void)
{
	u16 val;
	asm volatile ("sldt %0" : "=rm"(val));
	return val;
}

static inline void ltr(u16 val)
{
	asm volatile ("ltr %0" : : "rm"(val));
}

static inline u16 str(void)
{
	u16 val;
	asm volatile ("str %0" : "=rm"(val));
	return val;
}
6057d36db35SAvi Kivity 
6067f8f7356SKrish Sadhukhan static inline void write_dr0(void *val)
6077f8f7356SKrish Sadhukhan {
6087f8f7356SKrish Sadhukhan 	asm volatile ("mov %0, %%dr0" : : "r"(val) : "memory");
6097f8f7356SKrish Sadhukhan }
6107f8f7356SKrish Sadhukhan 
6117f8f7356SKrish Sadhukhan static inline void write_dr1(void *val)
6127f8f7356SKrish Sadhukhan {
6137f8f7356SKrish Sadhukhan 	asm volatile ("mov %0, %%dr1" : : "r"(val) : "memory");
6147f8f7356SKrish Sadhukhan }
6157f8f7356SKrish Sadhukhan 
6167f8f7356SKrish Sadhukhan static inline void write_dr2(void *val)
6177f8f7356SKrish Sadhukhan {
6187f8f7356SKrish Sadhukhan 	asm volatile ("mov %0, %%dr2" : : "r"(val) : "memory");
6197f8f7356SKrish Sadhukhan }
6207f8f7356SKrish Sadhukhan 
6217f8f7356SKrish Sadhukhan static inline void write_dr3(void *val)
6227f8f7356SKrish Sadhukhan {
6237f8f7356SKrish Sadhukhan 	asm volatile ("mov %0, %%dr3" : : "r"(val) : "memory");
6247f8f7356SKrish Sadhukhan }
6257f8f7356SKrish Sadhukhan 
6267d36db35SAvi Kivity static inline void write_dr6(ulong val)
6277d36db35SAvi Kivity {
6287d36db35SAvi Kivity 	asm volatile ("mov %0, %%dr6" : : "r"(val) : "memory");
6297d36db35SAvi Kivity }
6307d36db35SAvi Kivity 
/* Read the debug status register, DR6. */
static inline ulong read_dr6(void)
{
	ulong val;
	asm volatile ("mov %%dr6, %0" : "=r"(val));
	return val;
}
6377d36db35SAvi Kivity 
/* Write the debug control register, DR7. */
static inline void write_dr7(ulong val)
{
	asm volatile ("mov %0, %%dr7" : : "r"(val) : "memory");
}
6427d36db35SAvi Kivity 
/* Read the debug control register, DR7. */
static inline ulong read_dr7(void)
{
	ulong val;
	asm volatile ("mov %%dr7, %0" : "=r"(val));
	return val;
}
6497d36db35SAvi Kivity 
/* PAUSE: hint to the CPU that this is a spin-wait loop. */
static inline void pause(void)
{
	asm volatile ("pause");
}
6547d36db35SAvi Kivity 
/* Disable maskable interrupts (clear RFLAGS.IF). */
static inline void cli(void)
{
	asm volatile ("cli");
}
6597d36db35SAvi Kivity 
/* Enable maskable interrupts (set RFLAGS.IF). */
static inline void sti(void)
{
	asm volatile ("sti");
}
6647d36db35SAvi Kivity 
/*
 * Return a hardware-generated random number via RDRAND, or 0 if RDRAND
 * failed (CF clear).  Note, only a single attempt is made, and a failure
 * is indistinguishable from a genuine random value of zero.
 */
static inline unsigned long long rdrand(void)
{
	long long r;

	asm volatile("rdrand %0\n\t"
		     "jc 1f\n\t"
		     "mov $0, %0\n\t"
		     "1:\n\t" : "=r" (r));
	return r;
}
675520e2789SBabu Moger 
/* Read the Time-Stamp Counter (unserialized; see fenced_rdtsc() for ordering). */
static inline unsigned long long rdtsc(void)
{
	long long r;

#ifdef __x86_64__
	unsigned a, d;

	/* 64-bit: RDTSC returns the low/high halves in EAX/EDX separately. */
	asm volatile ("rdtsc" : "=a"(a), "=d"(d));
	r = a | ((long long)d << 32);
#else
	/* 32-bit: "=A" binds the EDX:EAX pair directly to the 64-bit result. */
	asm volatile ("rdtsc" : "=A"(r));
#endif
	return r;
}
6900d7251beSJason Wang 
/*
 * Per the advice in the SDM, volume 2, the sequence "mfence; lfence"
 * executed immediately before rdtsc ensures that rdtsc will be
 * executed only after all previous instructions have executed and all
 * previous loads and stores are globally visible. In addition, the
 * lfence immediately after rdtsc ensures that rdtsc will be executed
 * prior to the execution of any subsequent instruction.
 */
static inline unsigned long long fenced_rdtsc(void)
{
	unsigned long long tsc;

#ifdef __x86_64__
	unsigned int eax, edx;

	/* 64-bit: RDTSC returns the low/high halves in EAX/EDX separately. */
	asm volatile ("mfence; lfence; rdtsc; lfence" : "=a"(eax), "=d"(edx));
	tsc = eax | ((unsigned long long)edx << 32);
#else
	/* 32-bit: "=A" binds the EDX:EAX pair directly to the 64-bit result. */
	asm volatile ("mfence; lfence; rdtsc; lfence" : "=A"(tsc));
#endif
	return tsc;
}
713b49a1a6dSJim Mattson 
/* Read the TSC via RDTSCP; the IA32_TSC_AUX value is returned in *@aux. */
static inline unsigned long long rdtscp(u32 *aux)
{
	long long r;

#ifdef __x86_64__
	unsigned a, d;

	asm volatile ("rdtscp" : "=a"(a), "=d"(d), "=c"(*aux));
	r = a | ((long long)d << 32);
#else
	asm volatile ("rdtscp" : "=A"(r), "=c"(*aux));
#endif
	return r;
}
728867f820dSPaolo Bonzini 
/* Write the Time-Stamp Counter by programming the IA32_TSC MSR. */
static inline void wrtsc(u64 tsc)
{
	wrmsr(MSR_IA32_TSC, tsc);
}
7330d7251beSJason Wang 
/* Disable maskable interrupts; equivalent to cli(). */
static inline void irq_disable(void)
{
	asm volatile("cli");
}
738ae0a920bSGleb Natapov 
/*
 * Note that irq_enable() does not ensure an interrupt shadow due
 * to the vagaries of compiler optimizations.  If you need the
 * shadow, use a single asm with "sti" and the instruction after it.
 */
static inline void irq_enable(void)
{
	asm volatile("sti");
}
747ae0a920bSGleb Natapov 
/* Invalidate any TLB entries for the page containing @va. */
static inline void invlpg(volatile void *va)
{
	asm volatile("invlpg (%0)" ::"r" (va) : "memory");
}
752334cd2bfSGleb Natapov 
/*
 * Enable interrupts and halt in a single asm so that HLT executes in
 * STI's one-instruction interrupt shadow, i.e. an interrupt arriving
 * between the two instructions cannot be delivered before the halt
 * begins (avoids a lost-wakeup race).
 */
static inline void safe_halt(void)
{
	asm volatile("sti; hlt");
}
7579d7eaa29SArthur Chunqi Li 
/*
 * Read the PKRU register.  ".byte 0x0f,0x01,0xee" is the hand-encoded
 * RDPKRU instruction (for toolchains without PKU support); it requires
 * ECX=0, returns PKRU in EAX, and zeros EDX.
 */
static inline u32 read_pkru(void)
{
	unsigned int eax, edx;
	unsigned int ecx = 0;
	unsigned int pkru;

	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (eax), "=d" (edx)
		     : "c" (ecx));
	pkru = eax;
	return pkru;
}
770e94079c5SPaolo Bonzini 
/*
 * Write @pkru to the PKRU register.  ".byte 0x0f,0x01,0xef" is the
 * hand-encoded WRPKRU instruction; it takes the new value in EAX and
 * requires ECX=0 and EDX=0.
 */
static inline void write_pkru(u32 pkru)
{
	unsigned int eax = pkru;
	unsigned int ecx = 0;
	unsigned int edx = 0;

	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (eax), "c" (ecx), "d" (edx));
}
780e94079c5SPaolo Bonzini 
781aedfd771SJim Mattson static inline bool is_canonical(u64 addr)
782aedfd771SJim Mattson {
783f4a8b68cSLara Lazier 	int va_width = (raw_cpuid(0x80000008, 0).a & 0xff00) >> 8;
784f4a8b68cSLara Lazier 	int shift_amt = 64 - va_width;
785f4a8b68cSLara Lazier 
786f4a8b68cSLara Lazier 	return (s64)(addr << shift_amt) >> shift_amt == addr;
787aedfd771SJim Mattson }
788aedfd771SJim Mattson 
/* Clear bit @bit in the bitmap at @addr (BTR; not LOCK-prefixed, so not atomic). */
static inline void clear_bit(int bit, u8 *addr)
{
	__asm__ __volatile__("btr %1, %0"
			     : "+m" (*addr) : "Ir" (bit) : "cc", "memory");
}
794e60c87fdSLiran Alon 
/* Set bit @bit in the bitmap at @addr (BTS; not LOCK-prefixed, so not atomic). */
static inline void set_bit(int bit, u8 *addr)
{
	__asm__ __volatile__("bts %1, %0"
			     : "+m" (*addr) : "Ir" (bit) : "cc", "memory");
}
800e60c87fdSLiran Alon 
/*
 * Flush the entire TLB, including entries for global pages, by toggling
 * CR4.PGE: the first CR4 write (with PGE flipped) invalidates all
 * translations, the second restores the original CR4 value.
 */
static inline void flush_tlb(void)
{
	ulong cr4;

	cr4 = read_cr4();
	write_cr4(cr4 ^ X86_CR4_PGE);
	write_cr4(cr4);
}
8095868743aSMarc Orr 
/* Generate #GP by writing through a non-canonical linear address. */
static inline void generate_non_canonical_gp(void)
{
	*(volatile u64 *)NONCANONICAL = 0;
}
814e39bee8fSSean Christopherson 
/* Generate #UD via the UD2 instruction. */
static inline void generate_ud(void)
{
	asm volatile ("ud2");
}
819e39bee8fSSean Christopherson 
/* Generate #DE by dividing EDX:EAX (zero) by EBX (zero). */
static inline void generate_de(void)
{
	asm volatile (
		"xor %%eax, %%eax\n\t"
		"xor %%ebx, %%ebx\n\t"
		"xor %%edx, %%edx\n\t"
		"idiv %%ebx\n\t"
		::: "eax", "ebx", "edx");
}
829e39bee8fSSean Christopherson 
/* Generate #BP via INT3. */
static inline void generate_bp(void)
{
	asm volatile ("int3");
}
834e39bee8fSSean Christopherson 
/* Generate a single-step #DB by setting RFLAGS.TF and executing one insn. */
static inline void generate_single_step_db(void)
{
	write_rflags(read_rflags() | X86_EFLAGS_TF);
	asm volatile("nop");
}
840e39bee8fSSean Christopherson 
static inline uint64_t generate_usermode_ac(void)
{
	/*
	 * Trigger an #AC by writing 8 bytes to a 4-byte aligned address.
	 * Disclaimer: It is assumed that the stack pointer is aligned
	 * on a 16-byte boundary as x86_64 stacks should be.
	 *
	 * NOTE(review): #AC is delivered only at CPL=3 with alignment
	 * checking enabled (CR0.AM and EFLAGS.AC); that setup is the
	 * caller's responsibility.
	 */
	asm volatile("movq $0, -0x4(%rsp)");

	return 0;
}
852e39bee8fSSean Christopherson 
/*
 * Switch from 64-bit to 32-bit mode and generate #OF via INTO.  Note, if RIP
 * or RSP holds a 64-bit value, this helper will NOT generate #OF.
 */
static inline void generate_of(void)
{
	/* Far pointer targeting the 32-bit "into" code below. */
	struct far_pointer32 fp = {
		.offset = (uintptr_t)&&into,
		.selector = KERNEL_CS32,
	};
	uintptr_t rsp;

	asm volatile ("mov %%rsp, %0" : "=r"(rsp));

	/* Both the target code and the stack must fit in 32 bits. */
	if (fp.offset != (uintptr_t)&&into) {
		printf("Code address too high.\n");
		return;
	}
	if ((u32)rsp != rsp) {
		printf("Stack address too high.\n");
		return;
	}

	/* Far call through the 32-bit code segment to reach "into". */
	asm goto ("lcall *%0" : : "m" (fp) : "rax" : into);
	return;
into:
	/* 0x7fffffff + 0x7fffffff overflows (sets OF), so INTO raises #OF. */
	asm volatile (".code32;"
		      "movl $0x7fffffff, %eax;"
		      "addl %eax, %eax;"
		      "into;"
		      "lret;"
		      ".code64");
	__builtin_unreachable();
}
8875faf5f60SSean Christopherson 
/* Execute FNOP, an x87 no-op; faults with #NM if CR0.TS or CR0.EM is set. */
static inline void fnop(void)
{
	asm volatile("fnop");
}
892694e59baSManali Shukla 
/* If CR0.TS is set in L2, #NM is generated. */
static inline void generate_cr0_ts_nm(void)
{
	/* Clear EM so that TS is the sole cause of the #NM. */
	write_cr0((read_cr0() & ~X86_CR0_EM) | X86_CR0_TS);
	fnop();
}
899694e59baSManali Shukla 
/* If CR0.TS is cleared and CR0.EM is set, #NM is generated. */
static inline void generate_cr0_em_nm(void)
{
	/* Clear TS so that EM is the sole cause of the #NM. */
	write_cr0((read_cr0() & ~X86_CR0_TS) | X86_CR0_EM);
	fnop();
}
906694e59baSManali Shukla 
9072719b92cSYang Weijiang static inline u8 pmu_version(void)
9082719b92cSYang Weijiang {
9092719b92cSYang Weijiang 	return cpuid(10).a & 0xff;
9102719b92cSYang Weijiang }
9112719b92cSYang Weijiang 
9122b4c8e50SYang Weijiang static inline bool this_cpu_has_pmu(void)
9132b4c8e50SYang Weijiang {
9142b4c8e50SYang Weijiang 	return !!pmu_version();
9152b4c8e50SYang Weijiang }
9162b4c8e50SYang Weijiang 
9172b4c8e50SYang Weijiang static inline bool this_cpu_has_perf_global_ctrl(void)
9182b4c8e50SYang Weijiang {
9192b4c8e50SYang Weijiang 	return pmu_version() > 1;
9202b4c8e50SYang Weijiang }
9212b4c8e50SYang Weijiang 
9222719b92cSYang Weijiang static inline u8 pmu_nr_gp_counters(void)
9232719b92cSYang Weijiang {
9242719b92cSYang Weijiang 	return (cpuid(10).a >> 8) & 0xff;
9252719b92cSYang Weijiang }
9262719b92cSYang Weijiang 
9272719b92cSYang Weijiang static inline u8 pmu_gp_counter_width(void)
9282719b92cSYang Weijiang {
9292719b92cSYang Weijiang 	return (cpuid(10).a >> 16) & 0xff;
9302719b92cSYang Weijiang }
9312719b92cSYang Weijiang 
9322719b92cSYang Weijiang static inline u8 pmu_gp_counter_mask_length(void)
9332719b92cSYang Weijiang {
9342719b92cSYang Weijiang 	return (cpuid(10).a >> 24) & 0xff;
9352719b92cSYang Weijiang }
9362719b92cSYang Weijiang 
9372719b92cSYang Weijiang static inline u8 pmu_nr_fixed_counters(void)
9382719b92cSYang Weijiang {
9392719b92cSYang Weijiang 	struct cpuid id = cpuid(10);
9402719b92cSYang Weijiang 
9412719b92cSYang Weijiang 	if ((id.a & 0xff) > 1)
9422719b92cSYang Weijiang 		return id.d & 0x1f;
9432719b92cSYang Weijiang 	else
9442719b92cSYang Weijiang 		return 0;
9452719b92cSYang Weijiang }
9462719b92cSYang Weijiang 
9472719b92cSYang Weijiang static inline u8 pmu_fixed_counter_width(void)
9482719b92cSYang Weijiang {
9492719b92cSYang Weijiang 	struct cpuid id = cpuid(10);
9502719b92cSYang Weijiang 
9512719b92cSYang Weijiang 	if ((id.a & 0xff) > 1)
9522719b92cSYang Weijiang 		return (id.d >> 5) & 0xff;
9532719b92cSYang Weijiang 	else
9542719b92cSYang Weijiang 		return 0;
9552719b92cSYang Weijiang }
9562719b92cSYang Weijiang 
9572719b92cSYang Weijiang static inline bool pmu_gp_counter_is_available(int i)
9582719b92cSYang Weijiang {
9592719b92cSYang Weijiang 	/* CPUID.0xA.EBX bit is '1 if they counter is NOT available. */
9602719b92cSYang Weijiang 	return !(cpuid(10).b & BIT(i));
9612719b92cSYang Weijiang }
9622719b92cSYang Weijiang 
963c3cde0a5SLike Xu static inline u64 this_cpu_perf_capabilities(void)
964c3cde0a5SLike Xu {
965c3cde0a5SLike Xu 	if (!this_cpu_has(X86_FEATURE_PDCM))
966c3cde0a5SLike Xu 		return 0;
967c3cde0a5SLike Xu 
968c3cde0a5SLike Xu 	return rdmsr(MSR_IA32_PERF_CAPABILITIES);
969c3cde0a5SLike Xu }
970c3cde0a5SLike Xu 
9717d36db35SAvi Kivity #endif
972