xref: /kvm-unit-tests/lib/linux/compiler.h (revision 4ceb02bf68f00a0cfe3532cf8cc96c101a6a16af)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Taken from Linux commit 219d54332a09 ("Linux 5.4"), from the file
 * tools/include/linux/compiler.h, with minor changes.
 */
#ifndef __LINUX_COMPILER_H
#define __LINUX_COMPILER_H

#ifndef __ASSEMBLY__

#define GCC_VERSION (__GNUC__ * 10000           \
		     + __GNUC_MINOR__ * 100     \
		     + __GNUC_PATCHLEVEL__)

#ifdef __clang__
#if __has_builtin(__builtin_add_overflow) && \
    __has_builtin(__builtin_sub_overflow) && \
    __has_builtin(__builtin_mul_overflow)
#define COMPILER_HAS_GENERIC_BUILTIN_OVERFLOW 1
#define check_add_overflow(a, b) ({			\
	typeof((a) + (b)) __d;				\
	__builtin_add_overflow(a, b, &__d);		\
})
#define check_sub_overflow(a, b) ({			\
	typeof((a) - (b)) __d;				\
	__builtin_sub_overflow(a, b, &__d);		\
})
#define check_mul_overflow(a, b) ({			\
	typeof((a) * (b)) __d;				\
	__builtin_mul_overflow(a, b, &__d);		\
})
#endif
#elif GCC_VERSION >= 70100 /* __builtin_{add,sub,mul}_overflow_p() need GCC 7.1+ */
#define COMPILER_HAS_GENERIC_BUILTIN_OVERFLOW 1
#define check_add_overflow(a, b) __builtin_add_overflow_p(a, b, (typeof((a) + (b)))0)
#define check_sub_overflow(a, b) __builtin_sub_overflow_p(a, b, (typeof((a) - (b)))0)
#define check_mul_overflow(a, b) __builtin_mul_overflow_p(a, b, (typeof((a) * (b)))0)
#else
#define check_add_overflow(a, b) ({ (void)((int)(a) == (int)(b)); 0; })
#define check_sub_overflow(a, b) ({ (void)((int)(a) == (int)(b)); 0; })
#define check_mul_overflow(a, b) ({ (void)((int)(a) == (int)(b)); 0; })
#endif
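/*
 * Illustrative usage sketch, not part of the upstream header: each
 * check_*_overflow(a, b) macro evaluates to nonzero when the result does
 * not fit in typeof((a) op (b)); the arithmetic result itself is discarded.
 *
 *	uint32_t a = 0xffffffffu;
 *
 *	if (check_add_overflow(a, 1u))
 *		;	// taken: a + 1 wraps around in uint32_t
 *	if (check_mul_overflow(a, 2u))
 *		;	// taken: a * 2 does not fit in uint32_t
 *
 * Note that the fallback definitions above always evaluate to 0, so the
 * checks are only meaningful with clang or a sufficiently new GCC.
 */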

#include <stdint.h>

#define barrier()	asm volatile("" : : : "memory")
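/*
 * Sketch, not part of the upstream header: barrier() is a pure compiler
 * barrier. It keeps the compiler from caching memory values across it or
 * reordering memory accesses around it, but emits no CPU fence:
 *
 *	flag = 1;
 *	barrier();	// the compiler may not sink the store below this
 *			// point; the CPU, however, may still reorder it
 */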

#define __always_inline	inline __attribute__((always_inline))

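/*
 * Helper behind READ_ONCE(): accesses of 1, 2, 4 or 8 bytes are performed
 * as a single volatile load of the matching width; larger objects fall back
 * to a memcpy() bracketed by compiler barriers.
 */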
static __always_inline void __read_once_size(const volatile void *p, void *res, int size)
{
	switch (size) {
	case 1: *(uint8_t *)res = *(volatile uint8_t *)p; break;
	case 2: *(uint16_t *)res = *(volatile uint16_t *)p; break;
	case 4: *(uint32_t *)res = *(volatile uint32_t *)p; break;
	case 8: *(uint64_t *)res = *(volatile uint64_t *)p; break;
	default:
		barrier();
		__builtin_memcpy((void *)res, (const void *)p, size);
		barrier();
	}
}

/*
 * Prevent the compiler from merging or refetching reads or writes. The
 * compiler is also forbidden from reordering successive instances of
 * READ_ONCE and WRITE_ONCE, but only when the compiler is aware of some
 * particular ordering. One way to make the compiler aware of ordering is to
 * put the two invocations of READ_ONCE or WRITE_ONCE in different C
 * statements.
 *
 * These two macros will also work on aggregate data types like structs or
 * unions. If the size of the accessed data type exceeds the word size of
 * the machine (e.g., 32 bits or 64 bits), READ_ONCE() and WRITE_ONCE()
 * fall back to a memcpy() bracketed by compiler barriers.
 *
 * Their two major use cases are: (1) Mediating communication between
 * process-level code and irq/NMI handlers, all running on the same CPU,
 * and (2) Ensuring that the compiler does not fold, spindle, or otherwise
 * mutilate accesses that either do not require ordering or that interact
 * with an explicit memory barrier or atomic instruction that provides the
 * required ordering.
 */

#define READ_ONCE(x)					\
({							\
	union { typeof(x) __val; char __c[1]; } __u =	\
		{ .__c = { 0 } };			\
	__read_once_size(&(x), __u.__c, sizeof(x));	\
	__u.__val;					\
})

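/*
 * Helper behind WRITE_ONCE(): the mirror image of __read_once_size(),
 * emitting a single volatile store for sizes up to 8 bytes and a
 * barrier-bracketed memcpy() otherwise.
 */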
static __always_inline void __write_once_size(volatile void *p, void *res, int size)
{
	switch (size) {
	case 1: *(volatile uint8_t *)p = *(uint8_t *)res; break;
	case 2: *(volatile uint16_t *)p = *(uint16_t *)res; break;
	case 4: *(volatile uint32_t *)p = *(uint32_t *)res; break;
	case 8: *(volatile uint64_t *)p = *(uint64_t *)res; break;
	default:
		barrier();
		__builtin_memcpy((void *)p, (const void *)res, size);
		barrier();
	}
}

#define WRITE_ONCE(x, val)				\
({							\
	union { typeof(x) __val; char __c[1]; } __u =	\
		{ .__val = (val) };			\
	__write_once_size(&(x), __u.__c, sizeof(x));	\
	__u.__val;					\
})
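/*
 * Illustrative sketch, not part of the upstream header: a typical pairing
 * is a flag stored by one context and polled by another, where only the
 * compiler needs to be kept honest (any CPU-level ordering still requires
 * explicit barriers):
 *
 *	// writer
 *	WRITE_ONCE(flag, 1);
 *
 *	// reader
 *	while (!READ_ONCE(flag))
 *		;
 *
 * 'flag' is a placeholder; without the macros the compiler could tear or
 * merge the store, or hoist the load out of the polling loop.
 */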

#endif /* !__ASSEMBLY__ */
#endif /* !__LINUX_COMPILER_H */