xref: /kvm-unit-tests/lib/x86/processor.h (revision 5868743a312d0492efda222f6b1c507dd9595abc)
1 #ifndef LIBCFLAT_PROCESSOR_H
2 #define LIBCFLAT_PROCESSOR_H
3 
4 #include "libcflat.h"
5 #include "msr.h"
6 #include <stdint.h>
7 
8 #ifdef __x86_64__
9 #  define R "r"
10 #  define W "q"
11 #  define S "8"
12 #else
13 #  define R "e"
14 #  define W "l"
15 #  define S "4"
16 #endif
17 
18 #define DF_VECTOR 8
19 #define TS_VECTOR 10
20 #define NP_VECTOR 11
21 #define SS_VECTOR 12
22 #define GP_VECTOR 13
23 #define PF_VECTOR 14
24 #define AC_VECTOR 17
25 
26 #define X86_CR0_PE     0x00000001
27 #define X86_CR0_MP     0x00000002
28 #define X86_CR0_EM     0x00000004
29 #define X86_CR0_TS     0x00000008
30 #define X86_CR0_WP     0x00010000
31 #define X86_CR0_AM     0x00040000
32 #define X86_CR0_PG     0x80000000
33 #define X86_CR3_PCID_MASK 0x00000fff
34 #define X86_CR4_TSD    0x00000004
35 #define X86_CR4_DE     0x00000008
36 #define X86_CR4_PSE    0x00000010
37 #define X86_CR4_PAE    0x00000020
38 #define X86_CR4_MCE    0x00000040
39 #define X86_CR4_PGE    0x00000080
40 #define X86_CR4_PCE    0x00000100
41 #define X86_CR4_UMIP   0x00000800
42 #define X86_CR4_VMXE   0x00002000
43 #define X86_CR4_PCIDE  0x00020000
44 #define X86_CR4_SMEP   0x00100000
45 #define X86_CR4_SMAP   0x00200000
46 #define X86_CR4_PKE    0x00400000
47 
48 #define X86_EFLAGS_CF    0x00000001
49 #define X86_EFLAGS_FIXED 0x00000002
50 #define X86_EFLAGS_PF    0x00000004
51 #define X86_EFLAGS_AF    0x00000010
52 #define X86_EFLAGS_ZF    0x00000040
53 #define X86_EFLAGS_SF    0x00000080
54 #define X86_EFLAGS_TF    0x00000100
55 #define X86_EFLAGS_IF    0x00000200
56 #define X86_EFLAGS_DF    0x00000400
57 #define X86_EFLAGS_OF    0x00000800
58 #define X86_EFLAGS_IOPL  0x00003000
59 #define X86_EFLAGS_NT    0x00004000
60 #define X86_EFLAGS_AC    0x00040000
61 
62 #define X86_EFLAGS_ALU (X86_EFLAGS_CF | X86_EFLAGS_PF | X86_EFLAGS_AF | \
63 			X86_EFLAGS_ZF | X86_EFLAGS_SF | X86_EFLAGS_OF)
64 
65 #define X86_IA32_EFER          0xc0000080
66 #define X86_EFER_LMA           (1UL << 8)
67 
/*
 * 32-bit far pointer operand for far jmp/call/lcall: a 32-bit offset
 * followed in memory by a 16-bit segment selector.
 */
struct far_pointer32 {
	u32 offset;
	u16 selector;
} __attribute__((packed));
72 
/*
 * Memory operand of LGDT/LIDT and destination of SGDT/SIDT:
 * 16-bit table limit followed by the (pointer-sized) linear base.
 */
struct descriptor_table_ptr {
    u16 limit;
    ulong base;
} __attribute__((packed));
77 
/*
 * Compiler-only barrier: the "memory" clobber stops the compiler from
 * caching or reordering memory accesses across this point.  Emits no
 * instructions, so it is not a CPU memory barrier.
 */
static inline void barrier(void)
{
    asm volatile ("" : : : "memory");
}
82 
/*
 * CLAC (clear EFLAGS.AC for SMAP), emitted as raw opcode bytes
 * (0f 01 ca) — presumably so assemblers without SMAP support can still
 * build this; confirm against required binutils version.
 */
static inline void clac(void)
{
    asm volatile (".byte 0x0f, 0x01, 0xca" : : : "memory");
}
87 
/*
 * STAC (set EFLAGS.AC for SMAP), emitted as raw opcode bytes
 * (0f 01 cb) for the same assembler-compatibility reason as clac().
 */
static inline void stac(void)
{
    asm volatile (".byte 0x0f, 0x01, 0xcb" : : : "memory");
}
92 
/*
 * Segment selector readers.  Each reads a segment register into a full
 * "unsigned" (the "=mr" constraint lets the compiler pick a register or
 * memory destination for the mov) and truncates to the 16-bit selector
 * on return.
 */
static inline u16 read_cs(void)
{
    unsigned val;

    asm volatile ("mov %%cs, %0" : "=mr"(val));
    return val;
}

/* Return the %ds selector. */
static inline u16 read_ds(void)
{
    unsigned val;

    asm volatile ("mov %%ds, %0" : "=mr"(val));
    return val;
}

/* Return the %es selector. */
static inline u16 read_es(void)
{
    unsigned val;

    asm volatile ("mov %%es, %0" : "=mr"(val));
    return val;
}

/* Return the %ss selector. */
static inline u16 read_ss(void)
{
    unsigned val;

    asm volatile ("mov %%ss, %0" : "=mr"(val));
    return val;
}

/* Return the %fs selector. */
static inline u16 read_fs(void)
{
    unsigned val;

    asm volatile ("mov %%fs, %0" : "=mr"(val));
    return val;
}

/* Return the %gs selector. */
static inline u16 read_gs(void)
{
    unsigned val;

    asm volatile ("mov %%gs, %0" : "=mr"(val));
    return val;
}
140 
/* Return the current RFLAGS/EFLAGS value via pushf + pop. */
static inline unsigned long read_rflags(void)
{
	unsigned long f;
	asm volatile ("pushf; pop %0\n\t" : "=rm"(f));
	return f;
}
147 
/*
 * Segment selector writers.  Loading a data segment register reloads its
 * hidden descriptor cache; the "memory" clobber keeps memory accesses
 * from being reordered across the segment switch.  (%cs cannot be
 * written with a mov, hence no write_cs() here.)
 */
static inline void write_ds(unsigned val)
{
    asm volatile ("mov %0, %%ds" : : "rm"(val) : "memory");
}

/* Load 'val' into %es. */
static inline void write_es(unsigned val)
{
    asm volatile ("mov %0, %%es" : : "rm"(val) : "memory");
}

/* Load 'val' into %ss. */
static inline void write_ss(unsigned val)
{
    asm volatile ("mov %0, %%ss" : : "rm"(val) : "memory");
}

/* Load 'val' into %fs. */
static inline void write_fs(unsigned val)
{
    asm volatile ("mov %0, %%fs" : : "rm"(val) : "memory");
}

/* Load 'val' into %gs. */
static inline void write_gs(unsigned val)
{
    asm volatile ("mov %0, %%gs" : : "rm"(val) : "memory");
}
172 
/* Load RFLAGS/EFLAGS from 'f' via push + popf. */
static inline void write_rflags(unsigned long f)
{
    asm volatile ("push %0; popf\n\t" : : "rm"(f));
}
177 
178 static inline void set_iopl(int iopl)
179 {
180 	unsigned long flags = read_rflags() & ~X86_EFLAGS_IOPL;
181 	flags |= iopl * (X86_EFLAGS_IOPL / 3);
182 	write_rflags(flags);
183 }
184 
/*
 * Read MSR 'index' with RDMSR and recombine the EDX:EAX halves into a
 * u64.  No exception fixup here: an invalid MSR faults (#GP).
 */
static inline u64 rdmsr(u32 index)
{
    u32 a, d;
    asm volatile ("rdmsr" : "=a"(a), "=d"(d) : "c"(index) : "memory");
    return a | ((u64)d << 32);
}
191 
/*
 * Write 'val' to MSR 'index' with WRMSR, splitting the value into the
 * EDX:EAX register pair the instruction expects.
 */
static inline void wrmsr(u32 index, u64 val)
{
    u32 a = val, d = val >> 32;
    asm volatile ("wrmsr" : : "a"(a), "d"(d), "c"(index) : "memory");
}
197 
/*
 * Read performance-monitoring counter 'index' with RDPMC and recombine
 * the EDX:EAX halves.
 */
static inline uint64_t rdpmc(uint32_t index)
{
    uint32_t a, d;
    asm volatile ("rdpmc" : "=a"(a), "=d"(d) : "c"(index));
    return a | ((uint64_t)d << 32);
}
204 
/*
 * Control register accessors.  Each is a direct mov to/from the
 * register; the "memory" clobbers prevent the compiler from moving
 * memory accesses across changes that can affect paging/caching.
 */
static inline void write_cr0(ulong val)
{
    asm volatile ("mov %0, %%cr0" : : "r"(val) : "memory");
}

/* Return CR0. */
static inline ulong read_cr0(void)
{
    ulong val;
    asm volatile ("mov %%cr0, %0" : "=r"(val) : : "memory");
    return val;
}

/* Write CR2 (page-fault linear address). */
static inline void write_cr2(ulong val)
{
    asm volatile ("mov %0, %%cr2" : : "r"(val) : "memory");
}

/* Return CR2. */
static inline ulong read_cr2(void)
{
    ulong val;
    asm volatile ("mov %%cr2, %0" : "=r"(val) : : "memory");
    return val;
}

/* Write CR3 (page-table base); this flushes non-global TLB entries. */
static inline void write_cr3(ulong val)
{
    asm volatile ("mov %0, %%cr3" : : "r"(val) : "memory");
}

/* Return CR3. */
static inline ulong read_cr3(void)
{
    ulong val;
    asm volatile ("mov %%cr3, %0" : "=r"(val) : : "memory");
    return val;
}

/* Write CR4 (feature-control bits, see X86_CR4_* above). */
static inline void write_cr4(ulong val)
{
    asm volatile ("mov %0, %%cr4" : : "r"(val) : "memory");
}

/* Return CR4. */
static inline ulong read_cr4(void)
{
    ulong val;
    asm volatile ("mov %%cr4, %0" : "=r"(val) : : "memory");
    return val;
}

/* Write CR8 (task-priority register). */
static inline void write_cr8(ulong val)
{
    asm volatile ("mov %0, %%cr8" : : "r"(val) : "memory");
}

/* Return CR8. */
static inline ulong read_cr8(void)
{
    ulong val;
    asm volatile ("mov %%cr8, %0" : "=r"(val) : : "memory");
    return val;
}
264 
/* Load the GDT register from *ptr (limit + base). */
static inline void lgdt(const struct descriptor_table_ptr *ptr)
{
    asm volatile ("lgdt %0" : : "m"(*ptr));
}

/* Store the GDT register into *ptr. */
static inline void sgdt(struct descriptor_table_ptr *ptr)
{
    asm volatile ("sgdt %0" : "=m"(*ptr));
}

/* Load the IDT register from *ptr. */
static inline void lidt(const struct descriptor_table_ptr *ptr)
{
    asm volatile ("lidt %0" : : "m"(*ptr));
}

/* Store the IDT register into *ptr. */
static inline void sidt(struct descriptor_table_ptr *ptr)
{
    asm volatile ("sidt %0" : "=m"(*ptr));
}

/* Load the LDT selector. */
static inline void lldt(unsigned val)
{
    asm volatile ("lldt %0" : : "rm"(val));
}

/* Return the current LDT selector. */
static inline u16 sldt(void)
{
    u16 val;
    asm volatile ("sldt %0" : "=rm"(val));
    return val;
}

/* Load the task register with selector 'val'. */
static inline void ltr(u16 val)
{
    asm volatile ("ltr %0" : : "rm"(val));
}

/* Return the current task register selector. */
static inline u16 str(void)
{
    u16 val;
    asm volatile ("str %0" : "=rm"(val));
    return val;
}
308 
/* Write debug status register DR6. */
static inline void write_dr6(ulong val)
{
    asm volatile ("mov %0, %%dr6" : : "r"(val) : "memory");
}

/* Return DR6. */
static inline ulong read_dr6(void)
{
    ulong val;
    asm volatile ("mov %%dr6, %0" : "=r"(val));
    return val;
}

/* Write debug control register DR7. */
static inline void write_dr7(ulong val)
{
    asm volatile ("mov %0, %%dr7" : : "r"(val) : "memory");
}

/* Return DR7. */
static inline ulong read_dr7(void)
{
    ulong val;
    asm volatile ("mov %%dr7, %0" : "=r"(val));
    return val;
}
332 
333 struct cpuid { u32 a, b, c, d; };
334 
/*
 * Execute CPUID with EAX = function and ECX = index (sub-leaf), with no
 * range checking of 'function' — see cpuid_indexed() for the checked
 * variant.
 */
static inline struct cpuid raw_cpuid(u32 function, u32 index)
{
    struct cpuid r;
    asm volatile ("cpuid"
                  : "=a"(r.a), "=b"(r.b), "=c"(r.c), "=d"(r.d)
                  : "0"(function), "2"(index));
    return r;
}
343 
344 static inline struct cpuid cpuid_indexed(u32 function, u32 index)
345 {
346     u32 level = raw_cpuid(function & 0xf0000000, 0).a;
347     if (level < function)
348         return (struct cpuid) { 0, 0, 0, 0 };
349     return raw_cpuid(function, index);
350 }
351 
/* CPUID with sub-leaf (ECX) 0, range-checked via cpuid_indexed(). */
static inline struct cpuid cpuid(u32 function)
{
    return cpuid_indexed(function, 0);
}
356 
357 static inline u8 cpuid_maxphyaddr(void)
358 {
359     if (raw_cpuid(0x80000000, 0).a < 0x80000008)
360         return 36;
361     return raw_cpuid(0x80000008, 0).a & 0xff;
362 }
363 
364 
/* PAUSE: spin-loop hint (and VM-exit trigger under pause filtering). */
static inline void pause(void)
{
    asm volatile ("pause");
}

/* Disable maskable interrupts.  Duplicate of irq_disable() below. */
static inline void cli(void)
{
    asm volatile ("cli");
}

/*
 * Enable maskable interrupts.  Duplicate of irq_enable() below; see the
 * comment there about the STI interrupt shadow.
 */
static inline void sti(void)
{
    asm volatile ("sti");
}
379 
/*
 * Read the time-stamp counter.  On 64-bit, RDTSC leaves the halves in
 * EAX/EDX which are recombined by hand; on 32-bit the "=A" constraint
 * maps the EDX:EAX pair directly to the 64-bit result.
 */
static inline unsigned long long rdtsc(void)
{
	long long r;

#ifdef __x86_64__
	unsigned a, d;

	asm volatile ("rdtsc" : "=a"(a), "=d"(d));
	r = a | ((long long)d << 32);
#else
	asm volatile ("rdtsc" : "=A"(r));
#endif
	return r;
}
394 
/*
 * Read the TSC with RDTSCP, which also stores the IA32_TSC_AUX value
 * (delivered in ECX) into *aux.  Same 32/64-bit split as rdtsc().
 */
static inline unsigned long long rdtscp(u32 *aux)
{
       long long r;

#ifdef __x86_64__
       unsigned a, d;

       asm volatile ("rdtscp" : "=a"(a), "=d"(d), "=c"(*aux));
       r = a | ((long long)d << 32);
#else
       asm volatile ("rdtscp" : "=A"(r), "=c"(*aux));
#endif
       return r;
}
409 
410 static inline void wrtsc(u64 tsc)
411 {
412 	unsigned a = tsc, d = tsc >> 32;
413 
414 	asm volatile("wrmsr" : : "a"(a), "d"(d), "c"(0x10));
415 }
416 
/* Disable maskable interrupts (CLI).  Same operation as cli() above. */
static inline void irq_disable(void)
{
    asm volatile("cli");
}
421 
/* Note that irq_enable() does not ensure an interrupt shadow due
 * to the vagaries of compiler optimizations.  If you need the
 * shadow, use a single asm with "sti" and the instruction after it
 * (e.g. safe_halt() below).
 */
static inline void irq_enable(void)
{
    asm volatile("sti");
}
430 
/* Invalidate the TLB entry for the page containing linear address va. */
static inline void invlpg(volatile void *va)
{
	asm volatile("invlpg (%0)" ::"r" (va) : "memory");
}
435 
/*
 * Enable interrupts and halt in a single asm so STI's one-instruction
 * interrupt shadow guarantees HLT executes before any pending interrupt
 * is delivered — no wakeup can be lost between the two instructions.
 */
static inline void safe_halt(void)
{
	asm volatile("sti; hlt");
}
440 
441 static inline u32 read_pkru(void)
442 {
443     unsigned int eax, edx;
444     unsigned int ecx = 0;
445     unsigned int pkru;
446 
447     asm volatile(".byte 0x0f,0x01,0xee\n\t"
448                  : "=a" (eax), "=d" (edx)
449                  : "c" (ecx));
450     pkru = eax;
451     return pkru;
452 }
453 
454 static inline void write_pkru(u32 pkru)
455 {
456     unsigned int eax = pkru;
457     unsigned int ecx = 0;
458     unsigned int edx = 0;
459 
460     asm volatile(".byte 0x0f,0x01,0xef\n\t"
461         : : "a" (eax), "c" (ecx), "d" (edx));
462 }
463 
464 static inline bool is_canonical(u64 addr)
465 {
466 	return (s64)(addr << 16) >> 16 == addr;
467 }
468 
/*
 * Clear bit 'bit' in the bitmap at addr using BTR.  Not atomic (no LOCK
 * prefix).  Note: with a register bit operand, BTR can address memory
 * beyond the single byte at *addr, hence the "memory" clobber.
 */
static inline void clear_bit(int bit, u8 *addr)
{
	__asm__ __volatile__("btr %1, %0"
			     : "+m" (*addr) : "Ir" (bit) : "cc", "memory");
}
474 
/*
 * Set bit 'bit' in the bitmap at addr using BTS.  Not atomic (no LOCK
 * prefix); same wide-access caveat as clear_bit(), hence the "memory"
 * clobber.
 */
static inline void set_bit(int bit, u8 *addr)
{
	__asm__ __volatile__("bts %1, %0"
			     : "+m" (*addr) : "Ir" (bit) : "cc", "memory");
}
480 
481 static inline void flush_tlb(void)
482 {
483 	ulong cr4;
484 
485 	cr4 = read_cr4();
486 	write_cr4(cr4 ^ X86_CR4_PGE);
487 	write_cr4(cr4);
488 }
489 
490 #endif
491