xref: /kvm-unit-tests/lib/x86/processor.h (revision 21ee643dca247e4926c7a3b64742584e4a151f92)
1 #ifndef LIBCFLAT_PROCESSOR_H
2 #define LIBCFLAT_PROCESSOR_H
3 
4 #include "libcflat.h"
5 #include "msr.h"
6 #include <stdint.h>
7 
/*
 * Word-size helpers for writing inline asm that works in both 32- and
 * 64-bit builds: R is the GPR name prefix ("r"ax vs "e"ax), W is the
 * instruction width suffix ("q" vs "l"), and S is the pointer size in
 * bytes, as a string for pasting into asm templates.
 */
#ifdef __x86_64__
#  define R "r"
#  define W "q"
#  define S "8"
#else
#  define R "e"
#  define W "l"
#  define S "4"
#endif
17 
/* CR0 bits (Intel SDM Vol. 3, "Control Registers"). */
#define X86_CR0_PE     0x00000001
#define X86_CR0_MP     0x00000002
#define X86_CR0_TS     0x00000008
#define X86_CR0_WP     0x00010000
#define X86_CR0_AM     0x00040000
#define X86_CR0_PG     0x80000000
/* When CR4.PCIDE is set, the low 12 bits of CR3 hold the current PCID. */
#define X86_CR3_PCID_MASK 0x00000fff
/* CR4 bits. */
#define X86_CR4_TSD    0x00000004
#define X86_CR4_DE     0x00000008
#define X86_CR4_PSE    0x00000010
#define X86_CR4_PAE    0x00000020
#define X86_CR4_MCE    0x00000040
#define X86_CR4_VMXE   0x00002000
#define X86_CR4_PCIDE  0x00020000
#define X86_CR4_SMAP   0x00200000
#define X86_CR4_PKE    0x00400000
34 
/*
 * RFLAGS/EFLAGS bits.  FIXED is reserved bit 1, which architecturally
 * always reads as 1.
 */
#define X86_EFLAGS_CF    0x00000001
#define X86_EFLAGS_FIXED 0x00000002
#define X86_EFLAGS_PF    0x00000004
#define X86_EFLAGS_AF    0x00000010
#define X86_EFLAGS_ZF    0x00000040
#define X86_EFLAGS_SF    0x00000080
#define X86_EFLAGS_TF    0x00000100
#define X86_EFLAGS_IF    0x00000200
#define X86_EFLAGS_DF    0x00000400
#define X86_EFLAGS_OF    0x00000800
#define X86_EFLAGS_NT    0x00004000
#define X86_EFLAGS_AC    0x00040000
47 
/* IA32_EFER MSR address. */
#define X86_IA32_EFER          0xc0000080
/*
 * EFER.LMA (Long Mode Active) is bit 10; bit 8 is EFER.LME (Long Mode
 * Enable).  The previous value (1UL << 8) actually tested LME, so any
 * caller checking "are we actively in long mode" got the wrong bit.
 */
#define X86_EFER_LMA           (1UL << 10)
50 
/* 16:32 far pointer, the memory operand format of ljmp/lcall from 32-bit code. */
struct far_pointer32 {
	u32 offset;
	u16 selector;
} __attribute__((packed));

/* Memory operand format for lgdt/sgdt/lidt/sidt: 16-bit limit, then base. */
struct descriptor_table_ptr {
    u16 limit;
    ulong base;
} __attribute__((packed));
60 
/* Compiler-only barrier: stops the compiler reordering memory accesses
 * across this point; emits no instructions. */
static inline void barrier(void)
{
    asm volatile ("" : : : "memory");
}

/* clac: clear EFLAGS.AC.  Hand-encoded (0f 01 ca) so it assembles even
 * with toolchains that do not know the SMAP instructions. */
static inline void clac(void)
{
    asm volatile (".byte 0x0f, 0x01, 0xca" : : : "memory");
}

/* stac: set EFLAGS.AC.  Hand-encoded (0f 01 cb), see clac(). */
static inline void stac(void)
{
    asm volatile (".byte 0x0f, 0x01, 0xcb" : : : "memory");
}
75 
/*
 * Segment selector readers.  The register form of "mov %seg, r32"
 * writes a full 32-bit GPR, hence the unsigned temporary; the value is
 * narrowed to the 16-bit selector on return.  The "=mr" constraint
 * lets the compiler pick a register or memory destination.
 */
static inline u16 read_cs(void)
{
    unsigned val;

    asm volatile ("mov %%cs, %0" : "=mr"(val));
    return val;
}

static inline u16 read_ds(void)
{
    unsigned val;

    asm volatile ("mov %%ds, %0" : "=mr"(val));
    return val;
}

static inline u16 read_es(void)
{
    unsigned val;

    asm volatile ("mov %%es, %0" : "=mr"(val));
    return val;
}

static inline u16 read_ss(void)
{
    unsigned val;

    asm volatile ("mov %%ss, %0" : "=mr"(val));
    return val;
}

static inline u16 read_fs(void)
{
    unsigned val;

    asm volatile ("mov %%fs, %0" : "=mr"(val));
    return val;
}

static inline u16 read_gs(void)
{
    unsigned val;

    asm volatile ("mov %%gs, %0" : "=mr"(val));
    return val;
}

/* Return the current RFLAGS (EFLAGS on 32-bit) via pushf/pop. */
static inline unsigned long read_rflags(void)
{
	unsigned long f;
	asm volatile ("pushf; pop %0\n\t" : "=rm"(f));
	return f;
}
130 
/*
 * Segment selector writers.  Loading a selector reloads the hidden
 * descriptor cache from the GDT/LDT and can fault (#GP/#NP/#SS) on a
 * bad selector; the "memory" clobber keeps memory accesses from being
 * reordered across the segment change.  There is deliberately no
 * write_cs(): CS can only be changed with a far transfer.
 */
static inline void write_ds(unsigned val)
{
    asm volatile ("mov %0, %%ds" : : "rm"(val) : "memory");
}

static inline void write_es(unsigned val)
{
    asm volatile ("mov %0, %%es" : : "rm"(val) : "memory");
}

static inline void write_ss(unsigned val)
{
    asm volatile ("mov %0, %%ss" : : "rm"(val) : "memory");
}

static inline void write_fs(unsigned val)
{
    asm volatile ("mov %0, %%fs" : : "rm"(val) : "memory");
}

static inline void write_gs(unsigned val)
{
    asm volatile ("mov %0, %%gs" : : "rm"(val) : "memory");
}

/* Load RFLAGS (EFLAGS on 32-bit) via push/popf; can change IF, AC, etc. */
static inline void write_rflags(unsigned long f)
{
    asm volatile ("push %0; popf\n\t" : : "rm"(f));
}
160 
/*
 * Read MSR @index.  rdmsr returns the low half in eax and the high
 * half in edx; faults with #GP on an invalid or unreadable MSR.
 */
static inline u64 rdmsr(u32 index)
{
    u32 a, d;
    asm volatile ("rdmsr" : "=a"(a), "=d"(d) : "c"(index) : "memory");
    return a | ((u64)d << 32);
}

/*
 * Write @val to MSR @index (edx:eax split as rdmsr above); faults with
 * #GP on an invalid MSR or reserved-bit violation.
 */
static inline void wrmsr(u32 index, u64 val)
{
    u32 a = val, d = val >> 32;
    asm volatile ("wrmsr" : : "a"(a), "d"(d), "c"(index) : "memory");
}

/* Read performance-monitoring counter @index (rdpmc, edx:eax result). */
static inline uint64_t rdpmc(uint32_t index)
{
    uint32_t a, d;
    asm volatile ("rdpmc" : "=a"(a), "=d"(d) : "c"(index));
    return a | ((uint64_t)d << 32);
}
180 
/*
 * Control register accessors.  The "memory" clobbers keep memory
 * accesses from being reordered across CR changes, which can alter
 * translation and protection (e.g. CR0.PG/WP, CR3, CR4.PAE).
 */
static inline void write_cr0(ulong val)
{
    asm volatile ("mov %0, %%cr0" : : "r"(val) : "memory");
}

static inline ulong read_cr0(void)
{
    ulong val;
    asm volatile ("mov %%cr0, %0" : "=r"(val) : : "memory");
    return val;
}

/* CR2 holds the faulting linear address after a #PF. */
static inline void write_cr2(ulong val)
{
    asm volatile ("mov %0, %%cr2" : : "r"(val) : "memory");
}

static inline ulong read_cr2(void)
{
    ulong val;
    asm volatile ("mov %%cr2, %0" : "=r"(val) : : "memory");
    return val;
}

/* CR3: page-table root (and PCID in the low bits when CR4.PCIDE=1). */
static inline void write_cr3(ulong val)
{
    asm volatile ("mov %0, %%cr3" : : "r"(val) : "memory");
}

static inline ulong read_cr3(void)
{
    ulong val;
    asm volatile ("mov %%cr3, %0" : "=r"(val) : : "memory");
    return val;
}

static inline void write_cr4(ulong val)
{
    asm volatile ("mov %0, %%cr4" : : "r"(val) : "memory");
}

static inline ulong read_cr4(void)
{
    ulong val;
    asm volatile ("mov %%cr4, %0" : "=r"(val) : : "memory");
    return val;
}

/* CR8 is the task-priority register (TPR); 64-bit encoding. */
static inline void write_cr8(ulong val)
{
    asm volatile ("mov %0, %%cr8" : : "r"(val) : "memory");
}

static inline ulong read_cr8(void)
{
    ulong val;
    asm volatile ("mov %%cr8, %0" : "=r"(val) : : "memory");
    return val;
}
240 
/*
 * Descriptor-table and task/LDT register accessors.  lgdt/sgdt and
 * lidt/sidt take the struct descriptor_table_ptr memory operand
 * directly via the "m" constraint.
 */
static inline void lgdt(const struct descriptor_table_ptr *ptr)
{
    asm volatile ("lgdt %0" : : "m"(*ptr));
}

static inline void sgdt(struct descriptor_table_ptr *ptr)
{
    asm volatile ("sgdt %0" : "=m"(*ptr));
}

static inline void lidt(const struct descriptor_table_ptr *ptr)
{
    asm volatile ("lidt %0" : : "m"(*ptr));
}

static inline void sidt(struct descriptor_table_ptr *ptr)
{
    asm volatile ("sidt %0" : "=m"(*ptr));
}

/* Load the LDT register with selector @val (0 disables the LDT). */
static inline void lldt(unsigned val)
{
    asm volatile ("lldt %0" : : "rm"(val));
}

/* Store the current LDT selector. */
static inline u16 sldt(void)
{
    u16 val;
    asm volatile ("sldt %0" : "=rm"(val));
    return val;
}

/* Load the task register; marks the referenced TSS descriptor busy. */
static inline void ltr(u16 val)
{
    asm volatile ("ltr %0" : : "rm"(val));
}

/* Store the current task register selector. */
static inline u16 str(void)
{
    u16 val;
    asm volatile ("str %0" : "=rm"(val));
    return val;
}
284 
/* DR6 is the debug status register (which breakpoint/condition hit). */
static inline void write_dr6(ulong val)
{
    asm volatile ("mov %0, %%dr6" : : "r"(val) : "memory");
}

static inline ulong read_dr6(void)
{
    ulong val;
    asm volatile ("mov %%dr6, %0" : "=r"(val));
    return val;
}

/* DR7 is the debug control register (breakpoint enables and types). */
static inline void write_dr7(ulong val)
{
    asm volatile ("mov %0, %%dr7" : : "r"(val) : "memory");
}

static inline ulong read_dr7(void)
{
    ulong val;
    asm volatile ("mov %%dr7, %0" : "=r"(val));
    return val;
}
308 
/* Register values returned by cpuid: eax, ebx, ecx, edx. */
struct cpuid { u32 a, b, c, d; };

/*
 * Execute cpuid with leaf @function (eax) and subleaf @index (ecx),
 * with no check that the leaf is actually supported — see
 * cpuid_indexed() for the checked variant.
 */
static inline struct cpuid raw_cpuid(u32 function, u32 index)
{
    struct cpuid r;
    asm volatile ("cpuid"
                  : "=a"(r.a), "=b"(r.b), "=c"(r.c), "=d"(r.d)
                  : "0"(function), "2"(index));
    return r;
}
319 
320 static inline struct cpuid cpuid_indexed(u32 function, u32 index)
321 {
322     u32 level = raw_cpuid(function & 0xf0000000, 0).a;
323     if (level < function)
324         return (struct cpuid) { 0, 0, 0, 0 };
325     return raw_cpuid(function, index);
326 }
327 
328 static inline struct cpuid cpuid(u32 function)
329 {
330     return cpuid_indexed(function, 0);
331 }
332 
333 static inline u8 cpuid_maxphyaddr(void)
334 {
335     if (raw_cpuid(0x80000000, 0).a < 0x80000008)
336         return 36;
337     return raw_cpuid(0x80000008, 0).a & 0xff;
338 }
339 
340 
/* pause: spin-loop hint; reduces power and helps SMT siblings. */
static inline void pause(void)
{
    asm volatile ("pause");
}

/* Disable maskable interrupts (clear EFLAGS.IF). */
static inline void cli(void)
{
    asm volatile ("cli");
}

/* Enable maskable interrupts (set EFLAGS.IF). */
static inline void sti(void)
{
    asm volatile ("sti");
}
355 
356 static inline unsigned long long rdtsc()
357 {
358 	long long r;
359 
360 #ifdef __x86_64__
361 	unsigned a, d;
362 
363 	asm volatile ("rdtsc" : "=a"(a), "=d"(d));
364 	r = a | ((long long)d << 32);
365 #else
366 	asm volatile ("rdtsc" : "=A"(r));
367 #endif
368 	return r;
369 }
370 
/*
 * Read the time-stamp counter with rdtscp, which additionally returns
 * IA32_TSC_AUX in *@aux (via ecx).  Faults with #UD on CPUs without
 * rdtscp support.
 */
static inline unsigned long long rdtscp(u32 *aux)
{
       long long r;

#ifdef __x86_64__
       unsigned a, d;

       asm volatile ("rdtscp" : "=a"(a), "=d"(d), "=c"(*aux));
       r = a | ((long long)d << 32);
#else
       asm volatile ("rdtscp" : "=A"(r), "=c"(*aux));
#endif
       return r;
}
385 
/* Write the time-stamp counter: wrmsr to MSR 0x10 (IA32_TIME_STAMP_COUNTER). */
static inline void wrtsc(u64 tsc)
{
	unsigned a = tsc, d = tsc >> 32;

	asm volatile("wrmsr" : : "a"(a), "d"(d), "c"(0x10));
}
392 
/* Disable maskable interrupts; alias of cli() in wrapper form. */
static inline void irq_disable(void)
{
    asm volatile("cli");
}

/* Note that irq_enable() does not ensure an interrupt shadow due
 * to the vagaries of compiler optimizations.  If you need the
 * shadow, use a single asm with "sti" and the instruction after it
 * (see safe_halt() for an example).
 */
static inline void irq_enable(void)
{
    asm volatile("sti");
}
406 
/* Invalidate any TLB entries for the page containing @va. */
static inline void invlpg(volatile void *va)
{
	asm volatile("invlpg (%0)" ::"r" (va) : "memory");
}

/*
 * Enable interrupts and halt in one asm statement so that hlt executes
 * inside the sti interrupt shadow: an interrupt that was pending while
 * IF was clear wakes the hlt instead of being taken before it.
 */
static inline void safe_halt(void)
{
	asm volatile("sti; hlt");
}
416 
417 static inline u32 read_pkru(void)
418 {
419     unsigned int eax, edx;
420     unsigned int ecx = 0;
421     unsigned int pkru;
422 
423     asm volatile(".byte 0x0f,0x01,0xee\n\t"
424                  : "=a" (eax), "=d" (edx)
425                  : "c" (ecx));
426     pkru = eax;
427     return pkru;
428 }
429 
430 static inline void write_pkru(u32 pkru)
431 {
432     unsigned int eax = pkru;
433     unsigned int ecx = 0;
434     unsigned int edx = 0;
435 
436     asm volatile(".byte 0x0f,0x01,0xef\n\t"
437         : : "a" (eax), "c" (ecx), "d" (edx));
438 }
439 
440 static inline bool is_canonical(u64 addr)
441 {
442 	return (s64)(addr << 16) >> 16 == addr;
443 }
444 
445 #endif
446