xref: /kvm-unit-tests/lib/linux/compiler.h (revision d5b60621b0e6863fd62e4657b7a2ec70bf7eee34)
1*d5b60621SAlexandru Elisei /* SPDX-License-Identifier: GPL-2.0 */
2*d5b60621SAlexandru Elisei /*
3*d5b60621SAlexandru Elisei  * Taken from Linux commit 219d54332a09 ("Linux 5.4"), from the file
4*d5b60621SAlexandru Elisei  * tools/include/linux/compiler.h, with minor changes.
5*d5b60621SAlexandru Elisei  */
6*d5b60621SAlexandru Elisei #ifndef __LINUX_COMPILER_H
7*d5b60621SAlexandru Elisei #define __LINUX_COMPILER_H
8*d5b60621SAlexandru Elisei 
9*d5b60621SAlexandru Elisei #ifndef __ASSEMBLY__
10*d5b60621SAlexandru Elisei 
11*d5b60621SAlexandru Elisei #include <stdint.h>
12*d5b60621SAlexandru Elisei 
/* Compiler-only barrier: the "memory" clobber stops the compiler from
 * reordering or caching memory accesses across this point.  It emits no
 * instructions and is NOT a CPU memory barrier. */
#define barrier()	asm volatile("" : : : "memory")

/* Force inlining regardless of the compiler's inlining heuristics. */
#define __always_inline	inline __attribute__((always_inline))
16*d5b60621SAlexandru Elisei 
/*
 * Copy @size bytes from @p into @res.  For the native access widths
 * (1, 2, 4, 8 bytes) this is done with a single volatile load, so the
 * compiler cannot tear, merge, or refetch the read.  Any other size
 * falls back to a plain memcpy bracketed by compiler barriers, which
 * prevents reordering but not tearing.
 */
static __always_inline void __read_once_size(const volatile void *p, void *res, int size)
{
	if (size == 1) {
		*(uint8_t *)res = *(volatile uint8_t *)p;
	} else if (size == 2) {
		*(uint16_t *)res = *(volatile uint16_t *)p;
	} else if (size == 4) {
		*(uint32_t *)res = *(volatile uint32_t *)p;
	} else if (size == 8) {
		*(uint64_t *)res = *(volatile uint64_t *)p;
	} else {
		/* Non-native size: best effort, ordering only. */
		barrier();
		__builtin_memcpy((void *)res, (const void *)p, size);
		barrier();
	}
}
30*d5b60621SAlexandru Elisei 
31*d5b60621SAlexandru Elisei /*
32*d5b60621SAlexandru Elisei  * Prevent the compiler from merging or refetching reads or writes. The
33*d5b60621SAlexandru Elisei  * compiler is also forbidden from reordering successive instances of
34*d5b60621SAlexandru Elisei  * READ_ONCE and WRITE_ONCE, but only when the compiler is aware of some
35*d5b60621SAlexandru Elisei  * particular ordering. One way to make the compiler aware of ordering is to
36*d5b60621SAlexandru Elisei  * put the two invocations of READ_ONCE or WRITE_ONCE in different C
37*d5b60621SAlexandru Elisei  * statements.
38*d5b60621SAlexandru Elisei  *
39*d5b60621SAlexandru Elisei  * These two macros will also work on aggregate data types like structs or
40*d5b60621SAlexandru Elisei  * unions. If the size of the accessed data type exceeds the word size of
41*d5b60621SAlexandru Elisei  * the machine (e.g., 32 bits or 64 bits) READ_ONCE() and WRITE_ONCE() will
42*d5b60621SAlexandru Elisei  * fall back to memcpy and print a compile-time warning.
43*d5b60621SAlexandru Elisei  *
44*d5b60621SAlexandru Elisei  * Their two major use cases are: (1) Mediating communication between
45*d5b60621SAlexandru Elisei  * process-level code and irq/NMI handlers, all running on the same CPU,
46*d5b60621SAlexandru Elisei  * and (2) Ensuring that the compiler does not fold, spindle, or otherwise
47*d5b60621SAlexandru Elisei  * mutilate accesses that either do not require ordering or that interact
48*d5b60621SAlexandru Elisei  * with an explicit memory barrier or atomic instruction that provides the
49*d5b60621SAlexandru Elisei  * required ordering.
50*d5b60621SAlexandru Elisei  */
51*d5b60621SAlexandru Elisei 
/*
 * READ_ONCE(x): read @x exactly once, preventing the compiler from
 * merging, refetching, or (for sizes up to 8 bytes) tearing the load.
 * Works on aggregates too, via the union-of-bytes trick: the value is
 * read into __u.__c and then reinterpreted as typeof(x).  Evaluates to
 * the value read.
 */
#define READ_ONCE(x)					\
({							\
	union { typeof(x) __val; char __c[1]; } __u =	\
		{ .__c = { 0 } };			\
	__read_once_size(&(x), __u.__c, sizeof(x));	\
	__u.__val;					\
})
59*d5b60621SAlexandru Elisei 
/*
 * Copy @size bytes from @res into @p.  For the native access widths
 * (1, 2, 4, 8 bytes) this is a single volatile store, so the compiler
 * cannot tear, merge, or repeat the write.  Any other size falls back
 * to a plain memcpy bracketed by compiler barriers, which prevents
 * reordering but not tearing.
 */
static __always_inline void __write_once_size(volatile void *p, void *res, int size)
{
	if (size == 1) {
		*(volatile uint8_t *)p = *(uint8_t *)res;
	} else if (size == 2) {
		*(volatile uint16_t *)p = *(uint16_t *)res;
	} else if (size == 4) {
		*(volatile uint32_t *)p = *(uint32_t *)res;
	} else if (size == 8) {
		*(volatile uint64_t *)p = *(uint64_t *)res;
	} else {
		/* Non-native size: best effort, ordering only. */
		barrier();
		__builtin_memcpy((void *)p, (const void *)res, size);
		barrier();
	}
}
73*d5b60621SAlexandru Elisei 
/*
 * WRITE_ONCE(x, val): write @val to @x exactly once, preventing the
 * compiler from merging, repeating, or (for sizes up to 8 bytes)
 * tearing the store.  Works on aggregates via the same union trick as
 * READ_ONCE().  Evaluates to the value written.
 */
#define WRITE_ONCE(x, val)				\
({							\
	union { typeof(x) __val; char __c[1]; } __u =	\
		{ .__val = (val) }; 			\
	__write_once_size(&(x), __u.__c, sizeof(x));	\
	__u.__val;					\
})
81*d5b60621SAlexandru Elisei 
82*d5b60621SAlexandru Elisei #endif /* !__ASSEMBLY__ */
83*d5b60621SAlexandru Elisei #endif /* !__LINUX_COMPILER_H */
84