xref: /kvm-unit-tests/lib/x86/processor.h (revision f1071c6d5fc454957e2501934fea6d9e592da8a5)
1 #ifndef LIBCFLAT_PROCESSOR_H
2 #define LIBCFLAT_PROCESSOR_H
3 
4 #include "libcflat.h"
5 #include "msr.h"
6 #include <stdint.h>
7 
/*
 * Word-size abstraction for inline asm shared between 32- and 64-bit
 * builds: R is the GPR name prefix ("r"ax vs "e"ax), W is the
 * operand-size instruction suffix ("q" vs "l"), and S is the pointer
 * size in bytes as a string ("8" vs "4").
 */
#ifdef __x86_64__
#  define R "r"
#  define W "q"
#  define S "8"
#else
#  define R "e"
#  define W "l"
#  define S "4"
#endif
17 
/* CR0 bits (subset used by the tests). */
#define X86_CR0_PE     0x00000001
#define X86_CR0_MP     0x00000002
#define X86_CR0_TS     0x00000008
#define X86_CR0_WP     0x00010000
#define X86_CR0_AM     0x00040000
#define X86_CR0_PG     0x80000000
/* CR4 bits (subset used by the tests). */
#define X86_CR4_TSD    0x00000004
#define X86_CR4_DE     0x00000008
#define X86_CR4_PSE    0x00000010
#define X86_CR4_PAE    0x00000020
#define X86_CR4_VMXE   0x00002000
#define X86_CR4_PCIDE  0x00020000
#define X86_CR4_SMAP   0x00200000
#define X86_CR4_PKE    0x00400000

/* RFLAGS bits (subset used by the tests). */
#define X86_EFLAGS_CF  0x00000001
#define X86_EFLAGS_PF  0x00000004
#define X86_EFLAGS_AF  0x00000010
#define X86_EFLAGS_ZF  0x00000040
#define X86_EFLAGS_SF  0x00000080
#define X86_EFLAGS_OF  0x00000800
#define X86_EFLAGS_AC  0x00040000

/* IA32_EFER MSR address (same MSR that msr.h exposes as MSR_EFER). */
#define X86_IA32_EFER          0xc0000080
/*
 * NOTE(review): in EFER, bit 8 is LME (long-mode enable) and LMA
 * (long-mode active) is bit 10.  The value below is bit 8, so the name
 * looks wrong — confirm what callers actually intend before renaming
 * or changing the value.
 */
#define X86_EFER_LMA           (1UL << 8)
43 
/*
 * 32-bit far pointer: in-memory operand layout for far jmp/call
 * (offset first, then the 16-bit segment selector), hence packed.
 */
struct far_pointer32 {
	u32 offset;
	u16 selector;
} __attribute__((packed));
48 
/*
 * Memory operand format for LGDT/SGDT/LIDT/SIDT: a 16-bit limit
 * followed immediately by the base address, hence packed.
 */
struct descriptor_table_ptr {
    u16 limit;
    ulong base;
} __attribute__((packed));
53 
/* Compiler-only memory barrier; emits no instructions. */
static inline void barrier(void)
{
    asm volatile ("" : : : "memory");
}
58 
/*
 * clac: clear EFLAGS.AC (SMAP).  Hand-encoded (0f 01 ca) so assemblers
 * that predate the SMAP mnemonics can still build this file.
 */
static inline void clac(void)
{
    asm volatile (".byte 0x0f, 0x01, 0xca" : : : "memory");
}
63 
/*
 * stac: set EFLAGS.AC (SMAP).  Hand-encoded (0f 01 cb) so assemblers
 * that predate the SMAP mnemonics can still build this file.
 */
static inline void stac(void)
{
    asm volatile (".byte 0x0f, 0x01, 0xcb" : : : "memory");
}
68 
69 static inline u16 read_cs(void)
70 {
71     unsigned val;
72 
73     asm volatile ("mov %%cs, %0" : "=mr"(val));
74     return val;
75 }
76 
77 static inline u16 read_ds(void)
78 {
79     unsigned val;
80 
81     asm volatile ("mov %%ds, %0" : "=mr"(val));
82     return val;
83 }
84 
85 static inline u16 read_es(void)
86 {
87     unsigned val;
88 
89     asm volatile ("mov %%es, %0" : "=mr"(val));
90     return val;
91 }
92 
93 static inline u16 read_ss(void)
94 {
95     unsigned val;
96 
97     asm volatile ("mov %%ss, %0" : "=mr"(val));
98     return val;
99 }
100 
101 static inline u16 read_fs(void)
102 {
103     unsigned val;
104 
105     asm volatile ("mov %%fs, %0" : "=mr"(val));
106     return val;
107 }
108 
109 static inline u16 read_gs(void)
110 {
111     unsigned val;
112 
113     asm volatile ("mov %%gs, %0" : "=mr"(val));
114     return val;
115 }
116 
117 static inline unsigned long read_rflags(void)
118 {
119 	unsigned long f;
120 	asm volatile ("pushf; pop %0\n\t" : "=rm"(f));
121 	return f;
122 }
123 
/* Load a new %ds selector; the "memory" clobber keeps the load ordered
 * against surrounding memory accesses. */
static inline void write_ds(unsigned val)
{
    asm volatile ("mov %0, %%ds" : : "rm"(val) : "memory");
}
128 
/* Load a new %es selector; the "memory" clobber keeps the load ordered
 * against surrounding memory accesses. */
static inline void write_es(unsigned val)
{
    asm volatile ("mov %0, %%es" : : "rm"(val) : "memory");
}
133 
/* Load a new %ss selector; the "memory" clobber keeps the load ordered
 * against surrounding memory accesses. */
static inline void write_ss(unsigned val)
{
    asm volatile ("mov %0, %%ss" : : "rm"(val) : "memory");
}
138 
/* Load a new %fs selector; the "memory" clobber keeps the load ordered
 * against surrounding memory accesses. */
static inline void write_fs(unsigned val)
{
    asm volatile ("mov %0, %%fs" : : "rm"(val) : "memory");
}
143 
/* Load a new %gs selector; the "memory" clobber keeps the load ordered
 * against surrounding memory accesses. */
static inline void write_gs(unsigned val)
{
    asm volatile ("mov %0, %%gs" : : "rm"(val) : "memory");
}
148 
/* Write the flags register via push/popf. */
static inline void write_rflags(unsigned long f)
{
    asm volatile ("push %0; popf\n\t" : : "rm"(f));
}
153 
154 static inline u64 rdmsr(u32 index)
155 {
156     u32 a, d;
157     asm volatile ("rdmsr" : "=a"(a), "=d"(d) : "c"(index) : "memory");
158     return a | ((u64)d << 32);
159 }
160 
161 static inline void wrmsr(u32 index, u64 val)
162 {
163     u32 a = val, d = val >> 32;
164     asm volatile ("wrmsr" : : "a"(a), "d"(d), "c"(index) : "memory");
165 }
166 
167 static inline uint64_t rdpmc(uint32_t index)
168 {
169     uint32_t a, d;
170     asm volatile ("rdpmc" : "=a"(a), "=d"(d) : "c"(index));
171     return a | ((uint64_t)d << 32);
172 }
173 
/* Load CR0; the "memory" clobber orders it against memory accesses
 * (CR0 carries paging/caching control bits). */
static inline void write_cr0(ulong val)
{
    asm volatile ("mov %0, %%cr0" : : "r"(val) : "memory");
}
178 
179 static inline ulong read_cr0(void)
180 {
181     ulong val;
182     asm volatile ("mov %%cr0, %0" : "=r"(val) : : "memory");
183     return val;
184 }
185 
/* Load CR2 (page-fault linear address register). */
static inline void write_cr2(ulong val)
{
    asm volatile ("mov %0, %%cr2" : : "r"(val) : "memory");
}
190 
191 static inline ulong read_cr2(void)
192 {
193     ulong val;
194     asm volatile ("mov %%cr2, %0" : "=r"(val) : : "memory");
195     return val;
196 }
197 
/* Load CR3 (page-table base); the "memory" clobber orders it against
 * memory accesses since this switches the address space. */
static inline void write_cr3(ulong val)
{
    asm volatile ("mov %0, %%cr3" : : "r"(val) : "memory");
}
202 
203 static inline ulong read_cr3(void)
204 {
205     ulong val;
206     asm volatile ("mov %%cr3, %0" : "=r"(val) : : "memory");
207     return val;
208 }
209 
/* Load CR4; the "memory" clobber orders it against memory accesses
 * (CR4 carries paging-feature enable bits). */
static inline void write_cr4(ulong val)
{
    asm volatile ("mov %0, %%cr4" : : "r"(val) : "memory");
}
214 
215 static inline ulong read_cr4(void)
216 {
217     ulong val;
218     asm volatile ("mov %%cr4, %0" : "=r"(val) : : "memory");
219     return val;
220 }
221 
/* Load CR8 (task-priority register, TPR). */
static inline void write_cr8(ulong val)
{
    asm volatile ("mov %0, %%cr8" : : "r"(val) : "memory");
}
226 
227 static inline ulong read_cr8(void)
228 {
229     ulong val;
230     asm volatile ("mov %%cr8, %0" : "=r"(val) : : "memory");
231     return val;
232 }
233 
/* Load the GDT register from *ptr. */
static inline void lgdt(const struct descriptor_table_ptr *ptr)
{
    asm volatile ("lgdt %0" : : "m"(*ptr));
}
238 
/* Store the GDT register into *ptr. */
static inline void sgdt(struct descriptor_table_ptr *ptr)
{
    asm volatile ("sgdt %0" : "=m"(*ptr));
}
243 
/* Load the IDT register from *ptr. */
static inline void lidt(const struct descriptor_table_ptr *ptr)
{
    asm volatile ("lidt %0" : : "m"(*ptr));
}
248 
/* Store the IDT register into *ptr. */
static inline void sidt(struct descriptor_table_ptr *ptr)
{
    asm volatile ("sidt %0" : "=m"(*ptr));
}
253 
/* Load the LDT selector. */
static inline void lldt(unsigned val)
{
    asm volatile ("lldt %0" : : "rm"(val));
}
258 
259 static inline u16 sldt(void)
260 {
261     u16 val;
262     asm volatile ("sldt %0" : "=rm"(val));
263     return val;
264 }
265 
/* Load the task register with the given TSS selector. */
static inline void ltr(u16 val)
{
    asm volatile ("ltr %0" : : "rm"(val));
}
270 
271 static inline u16 str(void)
272 {
273     u16 val;
274     asm volatile ("str %0" : "=rm"(val));
275     return val;
276 }
277 
/* Load DR6 (debug status register). */
static inline void write_dr6(ulong val)
{
    asm volatile ("mov %0, %%dr6" : : "r"(val) : "memory");
}
282 
283 static inline ulong read_dr6(void)
284 {
285     ulong val;
286     asm volatile ("mov %%dr6, %0" : "=r"(val));
287     return val;
288 }
289 
/* Load DR7 (debug control register). */
static inline void write_dr7(ulong val)
{
    asm volatile ("mov %0, %%dr7" : : "r"(val) : "memory");
}
294 
295 static inline ulong read_dr7(void)
296 {
297     ulong val;
298     asm volatile ("mov %%dr7, %0" : "=r"(val));
299     return val;
300 }
301 
302 struct cpuid { u32 a, b, c, d; };
303 
304 static inline struct cpuid raw_cpuid(u32 function, u32 index)
305 {
306     struct cpuid r;
307     asm volatile ("cpuid"
308                   : "=a"(r.a), "=b"(r.b), "=c"(r.c), "=d"(r.d)
309                   : "0"(function), "2"(index));
310     return r;
311 }
312 
313 static inline struct cpuid cpuid_indexed(u32 function, u32 index)
314 {
315     u32 level = raw_cpuid(function & 0xf0000000, 0).a;
316     if (level < function)
317         return (struct cpuid) { 0, 0, 0, 0 };
318     return raw_cpuid(function, index);
319 }
320 
/* Range-checked CPUID with the subleaf (ECX) fixed at 0. */
static inline struct cpuid cpuid(u32 function)
{
    return cpuid_indexed(function, 0);
}
325 
326 static inline u8 cpuid_maxphyaddr(void)
327 {
328     if (raw_cpuid(0x80000000, 0).a < 0x80000008)
329         return 36;
330     return raw_cpuid(0x80000008, 0).a & 0xff;
331 }
332 
333 
/* Spin-wait hint for busy loops (rep; nop). */
static inline void pause(void)
{
    asm volatile ("pause");
}
338 
/* Disable maskable external interrupts. */
static inline void cli(void)
{
    asm volatile ("cli");
}
343 
/* Enable maskable external interrupts (no interrupt-shadow guarantee;
 * see the note on irq_enable() below). */
static inline void sti(void)
{
    asm volatile ("sti");
}
348 
349 static inline unsigned long long rdtsc()
350 {
351 	long long r;
352 
353 #ifdef __x86_64__
354 	unsigned a, d;
355 
356 	asm volatile ("rdtsc" : "=a"(a), "=d"(d));
357 	r = a | ((long long)d << 32);
358 #else
359 	asm volatile ("rdtsc" : "=A"(r));
360 #endif
361 	return r;
362 }
363 
364 static inline unsigned long long rdtscp(u32 *aux)
365 {
366        long long r;
367 
368 #ifdef __x86_64__
369        unsigned a, d;
370 
371        asm volatile ("rdtscp" : "=a"(a), "=d"(d), "=c"(*aux));
372        r = a | ((long long)d << 32);
373 #else
374        asm volatile ("rdtscp" : "=A"(r), "=c"(*aux));
375 #endif
376        return r;
377 }
378 
/* Set the TSC by writing MSR 0x10 (IA32_TIME_STAMP_COUNTER) directly. */
static inline void wrtsc(u64 tsc)
{
	unsigned a = tsc, d = tsc >> 32;

	asm volatile("wrmsr" : : "a"(a), "d"(d), "c"(0x10));
}
385 
/* Disable maskable interrupts (same as cli(); kept for callers that
 * use the irq_* naming). */
static inline void irq_disable(void)
{
    asm volatile("cli");
}
390 
/*
 * Note that irq_enable() does not ensure an interrupt shadow due
 * to the vagaries of compiler optimizations.  If you need the
 * shadow, use a single asm with "sti" and the instruction after it.
 */
static inline void irq_enable(void)
{
    asm volatile("sti");
}
399 
/* Invalidate the TLB entry for the page containing 'va'. */
static inline void invlpg(volatile void *va)
{
	asm volatile("invlpg (%0)" ::"r" (va) : "memory");
}
404 
/* Enable interrupts and halt in a single asm so hlt executes inside
 * the sti interrupt shadow (see the irq_enable() note above) and a
 * pending interrupt can wake the CPU. */
static inline void safe_halt(void)
{
	asm volatile("sti; hlt");
}
409 
410 static inline u32 read_pkru(void)
411 {
412     unsigned int eax, edx;
413     unsigned int ecx = 0;
414     unsigned int pkru;
415 
416     asm volatile(".byte 0x0f,0x01,0xee\n\t"
417                  : "=a" (eax), "=d" (edx)
418                  : "c" (ecx));
419     pkru = eax;
420     return pkru;
421 }
422 
423 static inline void write_pkru(u32 pkru)
424 {
425     unsigned int eax = pkru;
426     unsigned int ecx = 0;
427     unsigned int edx = 0;
428 
429     asm volatile(".byte 0x0f,0x01,0xef\n\t"
430         : : "a" (eax), "c" (ecx), "d" (edx));
431 }
432 
433 #endif
434