// SPDX-License-Identifier: GPL-2.0
/*
 * KCSAN short boot-time selftests.
 *
 * Copyright (C) 2019, Google LLC.
 */

#define pr_fmt(fmt) "kcsan: " fmt

#include <linux/atomic.h>
#include <linux/bitops.h>
#include <linux/init.h>
#include <linux/kcsan-checks.h>
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/random.h>
#include <linux/sched.h>
#include <linux/spinlock.h>
#include <linux/types.h>

#include "encoding.h"

#define ITERS_PER_TEST 2000

/*
 * Test watchpoint encode and decode: check that encoding an access's info and
 * then decoding it preserves that info.
 */
static bool __init test_encode_decode(void)
{
	int i;

	for (i = 0; i < ITERS_PER_TEST; ++i) {
		size_t size = get_random_u32_inclusive(1, MAX_ENCODABLE_SIZE);
		bool is_write = !!get_random_u32_below(2);
		unsigned long verif_masked_addr;
		long encoded_watchpoint;
		bool verif_is_write;
		unsigned long addr;
		size_t verif_size;

		get_random_bytes(&addr, sizeof(addr));
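		/*
		 * Addresses below PAGE_SIZE are presumably not encodable;
		 * clamp so the check_encodable() check below does not trip.
		 */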
		if (addr < PAGE_SIZE)
			addr = PAGE_SIZE;

		if (WARN_ON(!check_encodable(addr, size)))
			return false;

		encoded_watchpoint = encode_watchpoint(addr, size, is_write);

		/* Check special watchpoints */
		if (WARN_ON(decode_watchpoint(INVALID_WATCHPOINT, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;
		if (WARN_ON(decode_watchpoint(CONSUMED_WATCHPOINT, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;

		/* Check decoding watchpoint returns same data */
		if (WARN_ON(!decode_watchpoint(encoded_watchpoint, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;
		if (WARN_ON(verif_masked_addr != (addr & WATCHPOINT_ADDR_MASK)))
			goto fail;
		if (WARN_ON(verif_size != size))
			goto fail;
		if (WARN_ON(is_write != verif_is_write))
			goto fail;

		continue;
fail:
		pr_err("%s fail: %s %zu bytes @ %lx -> encoded: %lx -> %s %zu bytes @ %lx\n",
		       __func__, is_write ? "write" : "read", size, addr, encoded_watchpoint,
		       verif_is_write ? "write" : "read", verif_size, verif_masked_addr);
		return false;
	}

	return true;
}

/* Test access matching function. */
static bool __init test_matching_access(void)
{
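	/*
	 * matching_access(addr1, size1, addr2, size2) is expected to report a
	 * match iff the ranges [addr1, addr1+size1) and [addr2, addr2+size2)
	 * overlap, which the cases below exercise.
	 */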
	if (WARN_ON(!matching_access(10, 1, 10, 1)))
		return false;
	if (WARN_ON(!matching_access(10, 2, 11, 1)))
		return false;
	if (WARN_ON(!matching_access(10, 1, 9, 2)))
		return false;
	if (WARN_ON(matching_access(10, 1, 11, 1)))
		return false;
	if (WARN_ON(matching_access(9, 1, 10, 1)))
		return false;

	/*
	 * An access of size 0 could match another access, as demonstrated here.
	 * Rather than add more comparisons to 'matching_access()', which would
	 * end up in the fast-path for *all* checks, check_access() simply
	 * returns for all accesses of size 0.
	 */
	if (WARN_ON(!matching_access(8, 8, 12, 0)))
		return false;

	return true;
}

/*
 * Correct memory barrier instrumentation is critical to avoiding false
 * positives: this simple boot-time test checks that certain barriers are
 * always properly instrumented. See kcsan_test for a more complete test.
 */
static DEFINE_SPINLOCK(test_spinlock);
static bool __init test_barrier(void)
{
#ifdef CONFIG_KCSAN_WEAK_MEMORY
	struct kcsan_scoped_access *reorder_access = &current->kcsan_ctx.reorder_access;
#else
	struct kcsan_scoped_access *reorder_access = NULL;
#endif
	bool ret = true;
	arch_spinlock_t arch_spinlock = __ARCH_SPIN_LOCK_UNLOCKED;
	atomic_t dummy;
	long test_var;

	if (!reorder_access || !IS_ENABLED(CONFIG_SMP))
		return true;

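/*
 * Set up a dummy reorder_access of non-zero size; barrier instrumentation is
 * expected to reset its size to 0. If the size is still non-zero after the
 * barrier under test, the barrier was not properly instrumented.
 */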
#define __KCSAN_CHECK_BARRIER(access_type, barrier, name)					\
	do {											\
		reorder_access->type = (access_type) | KCSAN_ACCESS_SCOPED;			\
		reorder_access->size = 1;							\
		barrier;									\
		if (reorder_access->size != 0) {						\
			pr_err("improperly instrumented type=(" #access_type "): " name "\n");	\
			ret = false;								\
		}										\
	} while (0)
#define KCSAN_CHECK_READ_BARRIER(b)  __KCSAN_CHECK_BARRIER(0, b, #b)
#define KCSAN_CHECK_WRITE_BARRIER(b) __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE, b, #b)
#define KCSAN_CHECK_RW_BARRIER(b)    __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE | KCSAN_ACCESS_COMPOUND, b, #b)

	kcsan_nestable_atomic_begin(); /* No watchpoints in called functions. */

	KCSAN_CHECK_READ_BARRIER(mb());
	KCSAN_CHECK_READ_BARRIER(rmb());
	KCSAN_CHECK_READ_BARRIER(smp_mb());
	KCSAN_CHECK_READ_BARRIER(smp_rmb());
	KCSAN_CHECK_READ_BARRIER(dma_rmb());
	KCSAN_CHECK_READ_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_READ_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_READ_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_READ_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_READ_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(cmpxchg(&test_var, 0,  0));
	KCSAN_CHECK_READ_BARRIER(cmpxchg_release(&test_var, 0,  0));
	KCSAN_CHECK_READ_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_READ_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_READ_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_READ_BARRIER(spin_unlock(&test_spinlock));

	KCSAN_CHECK_WRITE_BARRIER(mb());
	KCSAN_CHECK_WRITE_BARRIER(wmb());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb());
	KCSAN_CHECK_WRITE_BARRIER(smp_wmb());
	KCSAN_CHECK_WRITE_BARRIER(dma_wmb());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_WRITE_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(cmpxchg(&test_var, 0,  0));
	KCSAN_CHECK_WRITE_BARRIER(cmpxchg_release(&test_var, 0,  0));
	KCSAN_CHECK_WRITE_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_WRITE_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_WRITE_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_WRITE_BARRIER(spin_unlock(&test_spinlock));

	KCSAN_CHECK_RW_BARRIER(mb());
	KCSAN_CHECK_RW_BARRIER(wmb());
	KCSAN_CHECK_RW_BARRIER(rmb());
	KCSAN_CHECK_RW_BARRIER(smp_mb());
	KCSAN_CHECK_RW_BARRIER(smp_wmb());
	KCSAN_CHECK_RW_BARRIER(smp_rmb());
	KCSAN_CHECK_RW_BARRIER(dma_wmb());
	KCSAN_CHECK_RW_BARRIER(dma_rmb());
	KCSAN_CHECK_RW_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_RW_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_RW_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_RW_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_RW_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(cmpxchg(&test_var, 0,  0));
	KCSAN_CHECK_RW_BARRIER(cmpxchg_release(&test_var, 0,  0));
	KCSAN_CHECK_RW_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_RW_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_RW_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_RW_BARRIER(spin_unlock(&test_spinlock));
	KCSAN_CHECK_RW_BARRIER(xor_unlock_is_negative_byte(1, &test_var));
	KCSAN_CHECK_READ_BARRIER(xor_unlock_is_negative_byte(1, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(xor_unlock_is_negative_byte(1, &test_var));
	kcsan_nestable_atomic_end();

	return ret;
}

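/* Run the selftests at boot; the kernel panics if any test fails. */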
static int __init kcsan_selftest(void)
{
	int passed = 0;
	int total = 0;

#define RUN_TEST(do_test)                                                      \
	do {                                                                   \
		++total;                                                       \
		if (do_test())                                                 \
			++passed;                                              \
		else                                                           \
			pr_err("selftest: " #do_test " failed\n");             \
	} while (0)

	RUN_TEST(test_encode_decode);
	RUN_TEST(test_matching_access);
	RUN_TEST(test_barrier);

	pr_info("selftest: %d/%d tests passed\n", passed, total);
	if (passed != total)
		panic("selftests failed");
	return 0;
}
postcore_initcall(kcsan_selftest);