#ifndef _X86_PROCESSOR_H_
#define _X86_PROCESSOR_H_

#include "libcflat.h"
#include "desc.h"
#include "msr.h"
#include <bitops.h>
#include <stdint.h>

#define NONCANONICAL	0xaaaaaaaaaaaaaaaaull

#ifdef __x86_64__
#  define R "r"
#  define W "q"
#  define S "8"
#else
#  define R "e"
#  define W "l"
#  define S "4"
#endif

#define DB_VECTOR 1
#define BP_VECTOR 3
#define UD_VECTOR 6
#define DF_VECTOR 8
#define TS_VECTOR 10
#define NP_VECTOR 11
#define SS_VECTOR 12
#define GP_VECTOR 13
#define PF_VECTOR 14
#define AC_VECTOR 17
#define CP_VECTOR 21

#define X86_CR0_PE_BIT		(0)
#define X86_CR0_PE		BIT(X86_CR0_PE_BIT)
#define X86_CR0_MP_BIT		(1)
#define X86_CR0_MP		BIT(X86_CR0_MP_BIT)
#define X86_CR0_EM_BIT		(2)
#define X86_CR0_EM		BIT(X86_CR0_EM_BIT)
#define X86_CR0_TS_BIT		(3)
#define X86_CR0_TS		BIT(X86_CR0_TS_BIT)
#define X86_CR0_ET_BIT		(4)
#define X86_CR0_ET		BIT(X86_CR0_ET_BIT)
#define X86_CR0_NE_BIT		(5)
#define X86_CR0_NE		BIT(X86_CR0_NE_BIT)
#define X86_CR0_WP_BIT		(16)
#define X86_CR0_WP		BIT(X86_CR0_WP_BIT)
#define X86_CR0_AM_BIT		(18)
#define X86_CR0_AM		BIT(X86_CR0_AM_BIT)
#define X86_CR0_NW_BIT		(29)
#define X86_CR0_NW		BIT(X86_CR0_NW_BIT)
#define X86_CR0_CD_BIT		(30)
#define X86_CR0_CD		BIT(X86_CR0_CD_BIT)
#define X86_CR0_PG_BIT		(31)
#define X86_CR0_PG		BIT(X86_CR0_PG_BIT)

#define X86_CR3_PCID_MASK	GENMASK(11, 0)

#define X86_CR4_VME_BIT		(0)
#define X86_CR4_VME		BIT(X86_CR4_VME_BIT)
#define X86_CR4_PVI_BIT		(1)
#define X86_CR4_PVI		BIT(X86_CR4_PVI_BIT)
#define X86_CR4_TSD_BIT		(2)
#define X86_CR4_TSD		BIT(X86_CR4_TSD_BIT)
#define X86_CR4_DE_BIT		(3)
#define X86_CR4_DE		BIT(X86_CR4_DE_BIT)
#define X86_CR4_PSE_BIT		(4)
#define X86_CR4_PSE		BIT(X86_CR4_PSE_BIT)
#define X86_CR4_PAE_BIT		(5)
#define X86_CR4_PAE		BIT(X86_CR4_PAE_BIT)
#define X86_CR4_MCE_BIT		(6)
#define X86_CR4_MCE		BIT(X86_CR4_MCE_BIT)
#define X86_CR4_PGE_BIT		(7)
#define X86_CR4_PGE		BIT(X86_CR4_PGE_BIT)
#define X86_CR4_PCE_BIT		(8)
#define X86_CR4_PCE		BIT(X86_CR4_PCE_BIT)
#define X86_CR4_OSFXSR_BIT	(9)
#define X86_CR4_OSFXSR		BIT(X86_CR4_OSFXSR_BIT)
#define X86_CR4_OSXMMEXCPT_BIT	(10)
#define X86_CR4_OSXMMEXCPT	BIT(X86_CR4_OSXMMEXCPT_BIT)
#define X86_CR4_UMIP_BIT	(11)
#define X86_CR4_UMIP		BIT(X86_CR4_UMIP_BIT)
#define X86_CR4_LA57_BIT	(12)
#define X86_CR4_LA57		BIT(X86_CR4_LA57_BIT)
#define X86_CR4_VMXE_BIT	(13)
#define X86_CR4_VMXE		BIT(X86_CR4_VMXE_BIT)
#define X86_CR4_SMXE_BIT	(14)
#define X86_CR4_SMXE		BIT(X86_CR4_SMXE_BIT)
/* UNUSED			(15) */
#define X86_CR4_FSGSBASE_BIT	(16)
#define X86_CR4_FSGSBASE	BIT(X86_CR4_FSGSBASE_BIT)
#define X86_CR4_PCIDE_BIT	(17)
#define X86_CR4_PCIDE		BIT(X86_CR4_PCIDE_BIT)
#define X86_CR4_OSXSAVE_BIT	(18)
#define X86_CR4_OSXSAVE		BIT(X86_CR4_OSXSAVE_BIT)
#define X86_CR4_KL_BIT		(19)
#define X86_CR4_KL		BIT(X86_CR4_KL_BIT)
#define X86_CR4_SMEP_BIT	(20)
#define X86_CR4_SMEP		BIT(X86_CR4_SMEP_BIT)
#define X86_CR4_SMAP_BIT	(21)
#define X86_CR4_SMAP		BIT(X86_CR4_SMAP_BIT)
#define X86_CR4_PKE_BIT		(22)
#define X86_CR4_PKE		BIT(X86_CR4_PKE_BIT)
#define X86_CR4_CET_BIT		(23)
#define X86_CR4_CET		BIT(X86_CR4_CET_BIT)
#define X86_CR4_PKS_BIT		(24)
#define X86_CR4_PKS		BIT(X86_CR4_PKS_BIT)

#define X86_EFLAGS_CF_BIT	(0)
#define X86_EFLAGS_CF		BIT(X86_EFLAGS_CF_BIT)
#define X86_EFLAGS_FIXED_BIT	(1)
#define X86_EFLAGS_FIXED	BIT(X86_EFLAGS_FIXED_BIT)
#define X86_EFLAGS_PF_BIT	(2)
#define X86_EFLAGS_PF		BIT(X86_EFLAGS_PF_BIT)
/* RESERVED 0			(3) */
#define X86_EFLAGS_AF_BIT	(4)
#define X86_EFLAGS_AF		BIT(X86_EFLAGS_AF_BIT)
/* RESERVED 0			(5) */
#define X86_EFLAGS_ZF_BIT	(6)
#define X86_EFLAGS_ZF		BIT(X86_EFLAGS_ZF_BIT)
#define X86_EFLAGS_SF_BIT	(7)
#define X86_EFLAGS_SF		BIT(X86_EFLAGS_SF_BIT)
#define X86_EFLAGS_TF_BIT	(8)
#define X86_EFLAGS_TF		BIT(X86_EFLAGS_TF_BIT)
#define X86_EFLAGS_IF_BIT	(9)
#define X86_EFLAGS_IF		BIT(X86_EFLAGS_IF_BIT)
#define X86_EFLAGS_DF_BIT	(10)
#define X86_EFLAGS_DF		BIT(X86_EFLAGS_DF_BIT)
#define X86_EFLAGS_OF_BIT	(11)
#define X86_EFLAGS_OF		BIT(X86_EFLAGS_OF_BIT)
#define X86_EFLAGS_IOPL		GENMASK(13, 12)
#define X86_EFLAGS_NT_BIT	(14)
#define X86_EFLAGS_NT		BIT(X86_EFLAGS_NT_BIT)
/* RESERVED 0			(15) */
#define X86_EFLAGS_RF_BIT	(16)
#define X86_EFLAGS_RF		BIT(X86_EFLAGS_RF_BIT)
#define X86_EFLAGS_VM_BIT	(17)
#define X86_EFLAGS_VM		BIT(X86_EFLAGS_VM_BIT)
#define X86_EFLAGS_AC_BIT	(18)
#define X86_EFLAGS_AC		BIT(X86_EFLAGS_AC_BIT)
#define X86_EFLAGS_VIF_BIT	(19)
#define X86_EFLAGS_VIF		BIT(X86_EFLAGS_VIF_BIT)
#define X86_EFLAGS_VIP_BIT	(20)
#define X86_EFLAGS_VIP		BIT(X86_EFLAGS_VIP_BIT)
#define X86_EFLAGS_ID_BIT	(21)
#define X86_EFLAGS_ID		BIT(X86_EFLAGS_ID_BIT)

#define X86_EFLAGS_ALU (X86_EFLAGS_CF | X86_EFLAGS_PF | X86_EFLAGS_AF | \
			X86_EFLAGS_ZF | X86_EFLAGS_SF | X86_EFLAGS_OF)

/*
 * CPU features
 */

enum cpuid_output_regs {
	EAX,
	EBX,
	ECX,
	EDX
};

struct cpuid { u32 a, b, c, d; };

static inline struct cpuid raw_cpuid(u32 function, u32 index)
{
	struct cpuid r;
	asm volatile ("cpuid"
		      : "=a"(r.a), "=b"(r.b), "=c"(r.c), "=d"(r.d)
		      : "0"(function), "2"(index));
	return r;
}

static inline struct cpuid cpuid_indexed(u32 function, u32 index)
{
	u32 level = raw_cpuid(function & 0xf0000000, 0).a;

	if (level < function)
		return (struct cpuid) { 0, 0, 0, 0 };
	return raw_cpuid(function, index);
}

static inline struct cpuid cpuid(u32 function)
{
	return cpuid_indexed(function, 0);
}

static inline u8 cpuid_maxphyaddr(void)
{
	if (raw_cpuid(0x80000000, 0).a < 0x80000008)
		return 36;
	return raw_cpuid(0x80000008, 0).a & 0xff;
}

static inline bool is_intel(void)
{
	struct cpuid c = cpuid(0);
	/* name[3] is implicitly zeroed and terminates the 12-byte vendor string. */
	u32 name[4] = { c.b, c.d, c.c };

	return strcmp((char *)name, "GenuineIntel") == 0;
}

#define	CPUID(a, b, c, d) ((((unsigned long long) a) << 32) | (b << 16) | \
			  (c << 8) | d)

/*
 * Each X86_FEATURE_XXX definition is 64-bit and contains the following
 * CPUID meta-data:
 *
 *	[63:32] :  input value for EAX
 *	[31:16] :  input value for ECX
 *	[15:8]  :  output register
 *	[7:0]   :  bit position in output register
 */

/*
 * Basic Leaves, a.k.a. Intel defined
 */
#define	X86_FEATURE_MWAIT		(CPUID(0x1, 0, ECX, 3))
#define	X86_FEATURE_VMX			(CPUID(0x1, 0, ECX, 5))
#define	X86_FEATURE_PDCM		(CPUID(0x1, 0, ECX, 15))
#define	X86_FEATURE_PCID		(CPUID(0x1, 0, ECX, 17))
#define	X86_FEATURE_X2APIC		(CPUID(0x1, 0, ECX, 21))
#define	X86_FEATURE_MOVBE		(CPUID(0x1, 0, ECX, 22))
#define	X86_FEATURE_TSC_DEADLINE_TIMER	(CPUID(0x1, 0, ECX, 24))
#define	X86_FEATURE_XSAVE		(CPUID(0x1, 0, ECX, 26))
#define	X86_FEATURE_OSXSAVE		(CPUID(0x1, 0, ECX, 27))
#define	X86_FEATURE_RDRAND		(CPUID(0x1, 0, ECX, 30))
#define	X86_FEATURE_MCE			(CPUID(0x1, 0, EDX, 7))
#define	X86_FEATURE_APIC		(CPUID(0x1, 0, EDX, 9))
#define	X86_FEATURE_CLFLUSH		(CPUID(0x1, 0, EDX, 19))
#define	X86_FEATURE_XMM			(CPUID(0x1, 0, EDX, 25))
#define	X86_FEATURE_XMM2		(CPUID(0x1, 0, EDX, 26))
#define	X86_FEATURE_TSC_ADJUST		(CPUID(0x7, 0, EBX, 1))
#define	X86_FEATURE_HLE			(CPUID(0x7, 0, EBX, 4))
#define	X86_FEATURE_SMEP		(CPUID(0x7, 0, EBX, 7))
#define	X86_FEATURE_INVPCID		(CPUID(0x7, 0, EBX, 10))
#define	X86_FEATURE_RTM			(CPUID(0x7, 0, EBX, 11))
#define	X86_FEATURE_SMAP		(CPUID(0x7, 0, EBX, 20))
#define	X86_FEATURE_PCOMMIT		(CPUID(0x7, 0, EBX, 22))
#define	X86_FEATURE_CLFLUSHOPT		(CPUID(0x7, 0, EBX, 23))
#define	X86_FEATURE_CLWB		(CPUID(0x7, 0, EBX, 24))
#define	X86_FEATURE_UMIP		(CPUID(0x7, 0, ECX, 2))
#define	X86_FEATURE_PKU			(CPUID(0x7, 0, ECX, 3))
#define	X86_FEATURE_SHSTK		(CPUID(0x7, 0, ECX, 7))
#define	X86_FEATURE_LA57		(CPUID(0x7, 0, ECX, 16))
#define	X86_FEATURE_RDPID		(CPUID(0x7, 0, ECX, 22))
#define	X86_FEATURE_PKS			(CPUID(0x7, 0, ECX, 31))
#define	X86_FEATURE_IBT			(CPUID(0x7, 0, EDX, 20))
#define	X86_FEATURE_SPEC_CTRL		(CPUID(0x7, 0, EDX, 26))
#define	X86_FEATURE_FLUSH_L1D		(CPUID(0x7, 0, EDX, 28))
#define	X86_FEATURE_ARCH_CAPABILITIES	(CPUID(0x7, 0, EDX, 29))

/*
 * Extended Leaves, a.k.a. AMD defined
 */
#define	X86_FEATURE_SVM			(CPUID(0x80000001, 0, ECX, 2))
#define	X86_FEATURE_PERFCTR_CORE	(CPUID(0x80000001, 0, ECX, 23))
#define	X86_FEATURE_NX			(CPUID(0x80000001, 0, EDX, 20))
#define	X86_FEATURE_GBPAGES		(CPUID(0x80000001, 0, EDX, 26))
#define	X86_FEATURE_RDTSCP		(CPUID(0x80000001, 0, EDX, 27))
#define	X86_FEATURE_LM			(CPUID(0x80000001, 0, EDX, 29))
#define	X86_FEATURE_RDPRU		(CPUID(0x80000008, 0, EBX, 4))
#define	X86_FEATURE_AMD_IBPB		(CPUID(0x80000008, 0, EBX, 12))
#define	X86_FEATURE_NPT			(CPUID(0x8000000A, 0, EDX, 0))
#define	X86_FEATURE_LBRV		(CPUID(0x8000000A, 0, EDX, 1))
#define	X86_FEATURE_NRIPS		(CPUID(0x8000000A, 0, EDX, 3))
#define	X86_FEATURE_TSCRATEMSR		(CPUID(0x8000000A, 0, EDX, 4))
#define	X86_FEATURE_PAUSEFILTER		(CPUID(0x8000000A, 0, EDX, 10))
#define	X86_FEATURE_PFTHRESHOLD		(CPUID(0x8000000A, 0, EDX, 12))
#define	X86_FEATURE_VGIF		(CPUID(0x8000000A, 0, EDX, 16))
#define	X86_FEATURE_VNMI		(CPUID(0x8000000A, 0, EDX, 25))
#define	X86_FEATURE_AMD_PMU_V2		(CPUID(0x80000022, 0, EAX, 0))

static inline bool this_cpu_has(u64 feature)
{
	u32 input_eax = feature >> 32;
	u32 input_ecx = (feature >> 16) & 0xffff;
	u32 output_reg = (feature >> 8) & 0xff;
	u8 bit = feature & 0xff;
	struct cpuid c;
	u32 *tmp;

	c = cpuid_indexed(input_eax, input_ecx);
	tmp = (u32 *)&c;

	return ((*(tmp + (output_reg % 32))) & (1 << bit));
}

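/*
 * Illustrative sketch, not part of the upstream header: this_cpu_has()
 * consumes the CPUID() meta-data documented above.  The helper name is
 * hypothetical, for illustration only.
 */
static inline bool has_xsave_example(void)
{
	/* X86_FEATURE_XSAVE encodes leaf 0x1, index 0, ECX, bit 26. */
	return this_cpu_has(X86_FEATURE_XSAVE);
}
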
struct far_pointer32 {
	u32 offset;
	u16 selector;
} __attribute__((packed));

struct descriptor_table_ptr {
	u16 limit;
	ulong base;
} __attribute__((packed));

static inline void clac(void)
{
	/* "clac", encoded as raw bytes for old assemblers. */
	asm volatile (".byte 0x0f, 0x01, 0xca" : : : "memory");
}

static inline void stac(void)
{
	/* "stac", encoded as raw bytes for old assemblers. */
	asm volatile (".byte 0x0f, 0x01, 0xcb" : : : "memory");
}

static inline u16 read_cs(void)
{
	unsigned val;

	asm volatile ("mov %%cs, %0" : "=mr"(val));
	return val;
}

static inline u16 read_ds(void)
{
	unsigned val;

	asm volatile ("mov %%ds, %0" : "=mr"(val));
	return val;
}

static inline u16 read_es(void)
{
	unsigned val;

	asm volatile ("mov %%es, %0" : "=mr"(val));
	return val;
}

static inline u16 read_ss(void)
{
	unsigned val;

	asm volatile ("mov %%ss, %0" : "=mr"(val));
	return val;
}

static inline u16 read_fs(void)
{
	unsigned val;

	asm volatile ("mov %%fs, %0" : "=mr"(val));
	return val;
}

static inline u16 read_gs(void)
{
	unsigned val;

	asm volatile ("mov %%gs, %0" : "=mr"(val));
	return val;
}

static inline unsigned long read_rflags(void)
{
	unsigned long f;
	asm volatile ("pushf; pop %0\n\t" : "=rm"(f));
	return f;
}

static inline void write_ds(unsigned val)
{
	asm volatile ("mov %0, %%ds" : : "rm"(val) : "memory");
}

static inline void write_es(unsigned val)
{
	asm volatile ("mov %0, %%es" : : "rm"(val) : "memory");
}

static inline void write_ss(unsigned val)
{
	asm volatile ("mov %0, %%ss" : : "rm"(val) : "memory");
}

static inline void write_fs(unsigned val)
{
	asm volatile ("mov %0, %%fs" : : "rm"(val) : "memory");
}

static inline void write_gs(unsigned val)
{
	asm volatile ("mov %0, %%gs" : : "rm"(val) : "memory");
}

static inline void write_rflags(unsigned long f)
{
	asm volatile ("push %0; popf\n\t" : : "rm"(f));
}

static inline void set_iopl(int iopl)
{
	unsigned long flags = read_rflags() & ~X86_EFLAGS_IOPL;
	/* Scale iopl (0..3) into the two-bit IOPL field at EFLAGS[13:12]. */
	flags |= iopl * (X86_EFLAGS_IOPL / 3);
	write_rflags(flags);
}

/*
 * Don't use the safe variants for rdmsr() or wrmsr().  The exception fixup
 * infrastructure uses per-CPU data and thus consumes GS.base.  Various tests
 * temporarily modify MSR_GS_BASE and will explode when trying to determine
 * whether or not RDMSR/WRMSR faulted.
 */
static inline u64 rdmsr(u32 index)
{
	u32 a, d;
	asm volatile ("rdmsr" : "=a"(a), "=d"(d) : "c"(index) : "memory");
	return a | ((u64)d << 32);
}

static inline void wrmsr(u32 index, u64 val)
{
	u32 a = val, d = val >> 32;
	asm volatile ("wrmsr" : : "a"(a), "d"(d), "c"(index) : "memory");
}

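/*
 * Illustrative sketch (an assumption, not upstream code) of why the comment
 * above mandates the non-safe accessors: a test that temporarily swaps
 * MSR_GS_BASE must use plain rdmsr()/wrmsr(), because the exception fixup
 * backing the _safe variants dereferences per-CPU data through GS.base.
 */
static inline u64 swap_gs_base_example(u64 new_base)
{
	u64 old_base = rdmsr(MSR_GS_BASE);

	wrmsr(MSR_GS_BASE, new_base);
	return old_base;
}
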
#define rdreg64_safe(insn, index, val)					\
({									\
	uint32_t a, d;							\
	int vector;							\
									\
	vector = asm_safe_out2(insn, "=a"(a), "=d"(d), "c"(index));	\
									\
	if (vector)							\
		*(val) = 0;						\
	else								\
		*(val) = (uint64_t)a | ((uint64_t)d << 32);		\
	vector;								\
})

#define wrreg64_safe(insn, index, val)					\
({									\
	uint32_t eax = (val), edx = (val) >> 32;			\
									\
	asm_safe(insn, "a" (eax), "d" (edx), "c" (index));		\
})

static inline int rdmsr_safe(u32 index, uint64_t *val)
{
	return rdreg64_safe("rdmsr", index, val);
}

static inline int wrmsr_safe(u32 index, u64 val)
{
	return wrreg64_safe("wrmsr", index, val);
}

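/*
 * Usage sketch, not upstream code: the _safe variants return the exception
 * vector (0 on success), so an MSR can be probed without crashing the test.
 * The helper name is hypothetical.
 */
static inline bool msr_exists_example(u32 index)
{
	u64 ignored;

	/* A non-zero return is the vector of the fault, e.g. GP_VECTOR. */
	return rdmsr_safe(index, &ignored) == 0;
}
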
static inline int rdpmc_safe(u32 index, uint64_t *val)
{
	return rdreg64_safe("rdpmc", index, val);
}

static inline uint64_t rdpmc(uint32_t index)
{
	uint64_t val;
	int vector = rdpmc_safe(index, &val);

	assert_msg(!vector, "Unexpected %s on RDPMC(%" PRId32 ")",
		   exception_mnemonic(vector), index);
	return val;
}

static inline int xgetbv_safe(u32 index, u64 *result)
{
	/* xgetbv */
	return rdreg64_safe(".byte 0x0f,0x01,0xd0", index, result);
}

static inline int xsetbv_safe(u32 index, u64 value)
{
	/* xsetbv */
	return wrreg64_safe(".byte 0x0f,0x01,0xd1", index, value);
}

static inline int write_cr0_safe(ulong val)
{
	return asm_safe("mov %0,%%cr0", "r" (val));
}

static inline void write_cr0(ulong val)
{
	int vector = write_cr0_safe(val);

	assert_msg(!vector, "Unexpected fault '%d' writing CR0 = %lx",
		   vector, val);
}

static inline ulong read_cr0(void)
{
	ulong val;
	asm volatile ("mov %%cr0, %0" : "=r"(val) : : "memory");
	return val;
}

static inline void write_cr2(ulong val)
{
	asm volatile ("mov %0, %%cr2" : : "r"(val) : "memory");
}

static inline ulong read_cr2(void)
{
	ulong val;
	asm volatile ("mov %%cr2, %0" : "=r"(val) : : "memory");
	return val;
}

static inline int write_cr3_safe(ulong val)
{
	return asm_safe("mov %0,%%cr3", "r" (val));
}

static inline void write_cr3(ulong val)
{
	int vector = write_cr3_safe(val);

	assert_msg(!vector, "Unexpected fault '%d' writing CR3 = %lx",
		   vector, val);
}

static inline ulong read_cr3(void)
{
	ulong val;
	asm volatile ("mov %%cr3, %0" : "=r"(val) : : "memory");
	return val;
}

static inline void update_cr3(void *cr3)
{
	write_cr3((ulong)cr3);
}

static inline int write_cr4_safe(ulong val)
{
	return asm_safe("mov %0,%%cr4", "r" (val));
}

static inline void write_cr4(ulong val)
{
	int vector = write_cr4_safe(val);

	assert_msg(!vector, "Unexpected fault '%d' writing CR4 = %lx",
		   vector, val);
}

static inline ulong read_cr4(void)
{
	ulong val;
	asm volatile ("mov %%cr4, %0" : "=r"(val) : : "memory");
	return val;
}

static inline void write_cr8(ulong val)
{
	asm volatile ("mov %0, %%cr8" : : "r"(val) : "memory");
}

static inline ulong read_cr8(void)
{
	ulong val;
	asm volatile ("mov %%cr8, %0" : "=r"(val) : : "memory");
	return val;
}

static inline void lgdt(const struct descriptor_table_ptr *ptr)
{
	asm volatile ("lgdt %0" : : "m"(*ptr));
}

static inline void sgdt(struct descriptor_table_ptr *ptr)
{
	asm volatile ("sgdt %0" : "=m"(*ptr));
}

static inline void lidt(const struct descriptor_table_ptr *ptr)
{
	asm volatile ("lidt %0" : : "m"(*ptr));
}

static inline void sidt(struct descriptor_table_ptr *ptr)
{
	asm volatile ("sidt %0" : "=m"(*ptr));
}

static inline void lldt(u16 val)
{
	asm volatile ("lldt %0" : : "rm"(val));
}

static inline u16 sldt(void)
{
	u16 val;
	asm volatile ("sldt %0" : "=rm"(val));
	return val;
}

static inline void ltr(u16 val)
{
	asm volatile ("ltr %0" : : "rm"(val));
}

static inline u16 str(void)
{
	u16 val;
	asm volatile ("str %0" : "=rm"(val));
	return val;
}

static inline void write_dr0(void *val)
{
	asm volatile ("mov %0, %%dr0" : : "r"(val) : "memory");
}

static inline void write_dr1(void *val)
{
	asm volatile ("mov %0, %%dr1" : : "r"(val) : "memory");
}

static inline void write_dr2(void *val)
{
	asm volatile ("mov %0, %%dr2" : : "r"(val) : "memory");
}

static inline void write_dr3(void *val)
{
	asm volatile ("mov %0, %%dr3" : : "r"(val) : "memory");
}

static inline void write_dr6(ulong val)
{
	asm volatile ("mov %0, %%dr6" : : "r"(val) : "memory");
}

static inline ulong read_dr6(void)
{
	ulong val;
	asm volatile ("mov %%dr6, %0" : "=r"(val));
	return val;
}

static inline void write_dr7(ulong val)
{
	asm volatile ("mov %0, %%dr7" : : "r"(val) : "memory");
}

static inline ulong read_dr7(void)
{
	ulong val;
	asm volatile ("mov %%dr7, %0" : "=r"(val));
	return val;
}

static inline void pause(void)
{
	asm volatile ("pause");
}

static inline void cli(void)
{
	asm volatile ("cli");
}

static inline void sti(void)
{
	asm volatile ("sti");
}

static inline unsigned long long rdrand(void)
{
	long long r;

	asm volatile("rdrand %0\n\t"
		     "jc 1f\n\t"
		     "mov $0, %0\n\t"
		     "1:\n\t" : "=r" (r));
	return r;
}

static inline unsigned long long rdtsc(void)
{
	long long r;

#ifdef __x86_64__
	unsigned a, d;

	asm volatile ("rdtsc" : "=a"(a), "=d"(d));
	r = a | ((long long)d << 32);
#else
	asm volatile ("rdtsc" : "=A"(r));
#endif
	return r;
}

/*
 * Per the advice in the SDM, volume 2, the sequence "mfence; lfence"
 * executed immediately before rdtsc ensures that rdtsc will be
 * executed only after all previous instructions have executed and all
 * previous loads and stores are globally visible. In addition, the
 * lfence immediately after rdtsc ensures that rdtsc will be executed
 * prior to the execution of any subsequent instruction.
 */
static inline unsigned long long fenced_rdtsc(void)
{
	unsigned long long tsc;

#ifdef __x86_64__
	unsigned int eax, edx;

	asm volatile ("mfence; lfence; rdtsc; lfence" : "=a"(eax), "=d"(edx));
	tsc = eax | ((unsigned long long)edx << 32);
#else
	asm volatile ("mfence; lfence; rdtsc; lfence" : "=A"(tsc));
#endif
	return tsc;
}

static inline unsigned long long rdtscp(u32 *aux)
{
	long long r;

#ifdef __x86_64__
	unsigned a, d;

	asm volatile ("rdtscp" : "=a"(a), "=d"(d), "=c"(*aux));
	r = a | ((long long)d << 32);
#else
	asm volatile ("rdtscp" : "=A"(r), "=c"(*aux));
#endif
	return r;
}

static inline void wrtsc(u64 tsc)
{
	wrmsr(MSR_IA32_TSC, tsc);
}

static inline void irq_disable(void)
{
	asm volatile("cli");
}

/*
 * Note that irq_enable() does not ensure an interrupt shadow due
 * to the vagaries of compiler optimizations.  If you need the
 * shadow, use a single asm with "sti" and the instruction after it.
 */
static inline void irq_enable(void)
{
	asm volatile("sti");
}

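/*
 * Hedged example, not upstream code: per the note above, keeping "sti" and
 * the following instruction in one asm statement preserves the single
 * instruction interrupt shadow; safe_halt() below uses the same pattern.
 */
static inline void sti_nop_example(void)
{
	asm volatile("sti; nop");
}
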
static inline void invlpg(volatile void *va)
{
	asm volatile("invlpg (%0)" ::"r" (va) : "memory");
}

static inline int invpcid_safe(unsigned long type, void *desc)
{
	/* invpcid (%rax), %rbx */
	return asm_safe(".byte 0x66,0x0f,0x38,0x82,0x18", "a" (desc), "b" (type));
}

static inline void safe_halt(void)
{
	asm volatile("sti; hlt");
}

static inline u32 read_pkru(void)
{
	unsigned int eax, edx;
	unsigned int ecx = 0;
	unsigned int pkru;

	/* rdpkru */
	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (eax), "=d" (edx)
		     : "c" (ecx));
	pkru = eax;
	return pkru;
}

static inline void write_pkru(u32 pkru)
{
	unsigned int eax = pkru;
	unsigned int ecx = 0;
	unsigned int edx = 0;

	/* wrpkru */
	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (eax), "c" (ecx), "d" (edx));
}

static inline bool is_canonical(u64 addr)
{
	int va_width = (raw_cpuid(0x80000008, 0).a & 0xff00) >> 8;
	int shift_amt = 64 - va_width;

	return (s64)(addr << shift_amt) >> shift_amt == addr;
}

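/*
 * Usage sketch, not upstream code: NONCANONICAL (alternating 1010... bits)
 * is non-canonical for both 48-bit and 57-bit virtual address widths, so it
 * pairs naturally with is_canonical() in negative tests.
 */
static inline bool noncanonical_example(void)
{
	return !is_canonical(NONCANONICAL);
}
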
static inline void clear_bit(int bit, u8 *addr)
{
	__asm__ __volatile__("btr %1, %0"
			     : "+m" (*addr) : "Ir" (bit) : "cc", "memory");
}

static inline void set_bit(int bit, u8 *addr)
{
	__asm__ __volatile__("bts %1, %0"
			     : "+m" (*addr) : "Ir" (bit) : "cc", "memory");
}

static inline void flush_tlb(void)
{
	ulong cr4;

	cr4 = read_cr4();
	write_cr4(cr4 ^ X86_CR4_PGE);
	write_cr4(cr4);
}

static inline void generate_non_canonical_gp(void)
{
	*(volatile u64 *)NONCANONICAL = 0;
}

static inline void generate_ud(void)
{
	asm volatile ("ud2");
}

static inline void generate_de(void)
{
	asm volatile (
		"xor %%eax, %%eax\n\t"
		"xor %%ebx, %%ebx\n\t"
		"xor %%edx, %%edx\n\t"
		"idiv %%ebx\n\t"
		::: "eax", "ebx", "edx");
}

static inline void generate_bp(void)
{
	asm volatile ("int3");
}

static inline void generate_single_step_db(void)
{
	write_rflags(read_rflags() | X86_EFLAGS_TF);
	asm volatile("nop");
}

static inline uint64_t generate_usermode_ac(void)
{
	/*
	 * Trigger an #AC by writing 8 bytes to a 4-byte aligned address.
	 * Disclaimer: It is assumed that the stack pointer is aligned
	 * on a 16-byte boundary as x86_64 stacks should be.
	 */
	asm volatile("movq $0, -0x4(%rsp)");

	return 0;
}

/*
 * Switch from 64-bit to 32-bit mode and generate #OF via INTO.  Note, if RIP
 * or RSP holds a 64-bit value, this helper will NOT generate #OF.
 */
static inline void generate_of(void)
{
	struct far_pointer32 fp = {
		.offset = (uintptr_t)&&into,
		.selector = KERNEL_CS32,
	};
	uintptr_t rsp;

	asm volatile ("mov %%rsp, %0" : "=r"(rsp));

	if (fp.offset != (uintptr_t)&&into) {
		printf("Code address too high.\n");
		return;
	}
	if ((u32)rsp != rsp) {
		printf("Stack address too high.\n");
		return;
	}

	asm goto ("lcall *%0" : : "m" (fp) : "rax" : into);
	return;
into:
	asm volatile (".code32;"
		      "movl $0x7fffffff, %eax;"
		      "addl %eax, %eax;"
		      "into;"
		      "lret;"
		      ".code64");
	__builtin_unreachable();
}

static inline void fnop(void)
{
	asm volatile("fnop");
}

/* If CR0.TS is set in L2, #NM is generated. */
static inline void generate_cr0_ts_nm(void)
{
	write_cr0((read_cr0() & ~X86_CR0_EM) | X86_CR0_TS);
	fnop();
}

/* If CR0.TS is cleared and CR0.EM is set, #NM is generated. */
static inline void generate_cr0_em_nm(void)
{
	write_cr0((read_cr0() & ~X86_CR0_TS) | X86_CR0_EM);
	fnop();
}

#endif