11fe84fd4SMarco Elver // SPDX-License-Identifier: GPL-2.0
21fe84fd4SMarco Elver /*
 * KCSAN test with various race scenarios to test runtime behaviour. Since the
 * interface with which KCSAN's reports are obtained is via the console, this is
 * the output we should verify. For each test case, checks the presence (or
 * absence) of generated reports. Relies on the 'console' tracepoint to capture
 * reports as they appear in the kernel log.
81fe84fd4SMarco Elver *
91fe84fd4SMarco Elver * Makes use of KUnit for test organization, and the Torture framework for test
101fe84fd4SMarco Elver * thread control.
111fe84fd4SMarco Elver *
121fe84fd4SMarco Elver * Copyright (C) 2020, Google LLC.
131fe84fd4SMarco Elver * Author: Marco Elver <elver@google.com>
141fe84fd4SMarco Elver */
151fe84fd4SMarco Elver
16f6a14914SMarco Elver #define pr_fmt(fmt) "kcsan_test: " fmt
17f6a14914SMarco Elver
181fe84fd4SMarco Elver #include <kunit/test.h>
198bc32b34SMarco Elver #include <linux/atomic.h>
208bc32b34SMarco Elver #include <linux/bitops.h>
211fe84fd4SMarco Elver #include <linux/jiffies.h>
221fe84fd4SMarco Elver #include <linux/kcsan-checks.h>
231fe84fd4SMarco Elver #include <linux/kernel.h>
248bc32b34SMarco Elver #include <linux/mutex.h>
251fe84fd4SMarco Elver #include <linux/sched.h>
261fe84fd4SMarco Elver #include <linux/seqlock.h>
271fe84fd4SMarco Elver #include <linux/spinlock.h>
281fe84fd4SMarco Elver #include <linux/string.h>
291fe84fd4SMarco Elver #include <linux/timer.h>
301fe84fd4SMarco Elver #include <linux/torture.h>
311fe84fd4SMarco Elver #include <linux/tracepoint.h>
321fe84fd4SMarco Elver #include <linux/types.h>
331fe84fd4SMarco Elver #include <trace/events/printk.h>
341fe84fd4SMarco Elver
/* Skip the currently running KUnit test if @cond does not hold. */
#define KCSAN_TEST_REQUIRES(test, cond) do {			\
	if (!(cond))						\
		kunit_skip((test), "Test requires: " #cond);	\
} while (0)
3980804284SMarco Elver
/*
 * Expected access type for a compound read-write: only distinguishable from a
 * plain write if the compiler supports compound (read-before-write)
 * instrumentation; otherwise fall back to the given alternative @alt.
 */
#ifdef CONFIG_CC_HAS_TSAN_COMPOUND_READ_BEFORE_WRITE
#define __KCSAN_ACCESS_RW(alt)	(KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE)
#else
#define __KCSAN_ACCESS_RW(alt)	(alt)
#endif
45bec4a247SMarco Elver
/* Points to current test-case memory access "kernels". */
static void (*access_kernels[2])(void);

static struct task_struct **threads;	/* Lists of threads. */
static unsigned long end_time;		/* End time of test. */

/*
 * Report as observed from console. Captures up to 3 lines of interest from a
 * KCSAN report: the title line plus the two access-information lines.
 */
static struct {
	spinlock_t lock;	/* Serializes updates to @nlines and @lines. */
	int nlines;		/* Number of valid lines captured so far. */
	char lines[3][512];	/* Captured console lines of interest. */
} observed = {
	.lock = __SPIN_LOCK_UNLOCKED(observed.lock),
};
601fe84fd4SMarco Elver
/*
 * Setup test checking loop: disable KCSAN for the test-control thread itself,
 * compute the deadline, and publish the two access "kernels" that the torture
 * threads will start executing.
 */
static __no_kcsan inline void
begin_test_checks(void (*func1)(void), void (*func2)(void))
{
	kcsan_disable_current();

	/*
	 * Require at least as long as KCSAN_REPORT_ONCE_IN_MS, to ensure at
	 * least one race is reported.
	 */
	end_time = jiffies + msecs_to_jiffies(CONFIG_KCSAN_REPORT_ONCE_IN_MS + 500);

	/* Signal start; release potential initialization of shared data. */
	smp_store_release(&access_kernels[0], func1);
	smp_store_release(&access_kernels[1], func2);
}
771fe84fd4SMarco Elver
781fe84fd4SMarco Elver /* End test checking loop. */
792888557fSMarco Elver static __no_kcsan inline bool
end_test_checks(bool stop)801fe84fd4SMarco Elver end_test_checks(bool stop)
811fe84fd4SMarco Elver {
821fe84fd4SMarco Elver if (!stop && time_before(jiffies, end_time)) {
831fe84fd4SMarco Elver /* Continue checking */
841fe84fd4SMarco Elver might_sleep();
851fe84fd4SMarco Elver return false;
861fe84fd4SMarco Elver }
871fe84fd4SMarco Elver
881fe84fd4SMarco Elver kcsan_enable_current();
891fe84fd4SMarco Elver return true;
901fe84fd4SMarco Elver }
911fe84fd4SMarco Elver
/*
 * Probe for console output: checks if a race was reported, and obtains observed
 * lines of interest.
 *
 * @ignore: unused tracepoint private data.
 * @buf:    console message text; NOT NUL-terminated.
 * @len:    length of @buf in bytes.
 */
__no_kcsan
static void probe_console(void *ignore, const char *buf, size_t len)
{
	unsigned long flags;
	int nlines;

	/*
	 * Note that KCSAN reports under a global lock, so we do not risk the
	 * possibility of having multiple reports interleaved. If that were the
	 * case, we'd expect tests to fail.
	 */

	spin_lock_irqsave(&observed.lock, flags);
	nlines = observed.nlines;

	if (strnstr(buf, "BUG: KCSAN: ", len) && strnstr(buf, "test_", len)) {
		/*
		 * KCSAN report and related to the test.
		 *
		 * The provided @buf is not NUL-terminated; copy no more than
		 * @len bytes and let strscpy() add the missing NUL-terminator.
		 */
		strscpy(observed.lines[0], buf, min(len + 1, sizeof(observed.lines[0])));
		nlines = 1;
	} else if ((nlines == 1 || nlines == 2) && strnstr(buf, "bytes by", len)) {
		/* Access-information line following a captured title line. */
		strscpy(observed.lines[nlines++], buf, min(len + 1, sizeof(observed.lines[0])));

		if (strnstr(buf, "race at unknown origin", len)) {
			/* An unknown-origin race only has one access line. */
			if (WARN_ON(nlines != 2))
				goto out;

			/* No second line of interest. */
			strcpy(observed.lines[nlines++], "<none>");
		}
	}

out:
	WRITE_ONCE(observed.nlines, nlines); /* Publish new nlines. */
	spin_unlock_irqrestore(&observed.lock, flags);
}
1361fe84fd4SMarco Elver
/* Check if a report related to the test exists. */
__no_kcsan
static bool report_available(void)
{
	/* A report is complete once all 3 lines of interest were captured. */
	return READ_ONCE(observed.nlines) == ARRAY_SIZE(observed.lines);
}
1431fe84fd4SMarco Elver
/* Report information we expect in a report. */
struct expect_report {
	/* Access information of both accesses. */
	struct {
		void *fn;	/* Function pointer to expected function of top frame. */
		void *addr;	/* Address of access; unchecked if NULL. */
		size_t size;	/* Size of access; unchecked if @addr is NULL. */
		int type;	/* Access type, see KCSAN_ACCESS definitions. */
	} access[2];	/* access[1].fn == NULL denotes "race at unknown origin". */
};
1541fe84fd4SMarco Elver
/*
 * Check observed report matches information in @r. Renders the expected report
 * lines from @r and substring-matches them against the captured console lines.
 * Returns true only if a complete report is available and matches.
 */
__no_kcsan
static bool __report_matches(const struct expect_report *r)
{
	const bool is_assert = (r->access[0].type | r->access[1].type) & KCSAN_ACCESS_ASSERT;
	bool ret = false;
	unsigned long flags;
	typeof(*observed.lines) *expect;
	const char *end;
	char *cur;
	int i;

	/* Doubled-checked locking. */
	if (!report_available())
		return false;

	/* Heap-allocate the expected lines (same shape as observed.lines). */
	expect = kmalloc(sizeof(observed.lines), GFP_KERNEL);
	if (WARN_ON(!expect))
		return false;

	/* Generate expected report contents. */

	/* Title */
	cur = expect[0];
	end = &expect[0][sizeof(expect[0]) - 1];
	cur += scnprintf(cur, end - cur, "BUG: KCSAN: %s in ",
			 is_assert ? "assert: race" : "data-race");
	if (r->access[1].fn) {
		char tmp[2][64];
		int cmp;

		/* Expect lexographically sorted function names in title. */
		scnprintf(tmp[0], sizeof(tmp[0]), "%pS", r->access[0].fn);
		scnprintf(tmp[1], sizeof(tmp[1]), "%pS", r->access[1].fn);
		cmp = strcmp(tmp[0], tmp[1]);
		cur += scnprintf(cur, end - cur, "%ps / %ps",
				 cmp < 0 ? r->access[0].fn : r->access[1].fn,
				 cmp < 0 ? r->access[1].fn : r->access[0].fn);
	} else {
		scnprintf(cur, end - cur, "%pS", r->access[0].fn);
		/* The exact offset won't match, remove it. */
		cur = strchr(expect[0], '+');
		if (cur)
			*cur = '\0';
	}

	/* Access 1 */
	cur = expect[1];
	end = &expect[1][sizeof(expect[1]) - 1];
	if (!r->access[1].fn)
		cur += scnprintf(cur, end - cur, "race at unknown origin, with ");

	/* Access 1 & 2 */
	for (i = 0; i < 2; ++i) {
		const int ty = r->access[i].type;
		/* Human-readable access kind as printed in the report. */
		const char *const access_type =
			(ty & KCSAN_ACCESS_ASSERT) ?
				      ((ty & KCSAN_ACCESS_WRITE) ?
					       "assert no accesses" :
					       "assert no writes") :
				      ((ty & KCSAN_ACCESS_WRITE) ?
					       ((ty & KCSAN_ACCESS_COMPOUND) ?
							"read-write" :
							"write") :
				       "read");
		const bool is_atomic = (ty & KCSAN_ACCESS_ATOMIC);
		const bool is_scoped = (ty & KCSAN_ACCESS_SCOPED);
		/* Parenthesized suffix: "(marked)", "(reordered)", or both. */
		const char *const access_type_aux =
				(is_atomic && is_scoped) ? " (marked, reordered)"
				: (is_atomic ? " (marked)"
				   : (is_scoped ? " (reordered)" : ""));

		if (i == 1) {
			/* Access 2 */
			cur = expect[2];
			end = &expect[2][sizeof(expect[2]) - 1];

			if (!r->access[1].fn) {
				/* Dummy string if no second access is available. */
				strcpy(cur, "<none>");
				break;
			}
		}

		cur += scnprintf(cur, end - cur, "%s%s to ", access_type,
				 access_type_aux);

		if (r->access[i].addr) /* Address is optional. */
			cur += scnprintf(cur, end - cur, "0x%px of %zu bytes",
					 r->access[i].addr, r->access[i].size);
	}

	spin_lock_irqsave(&observed.lock, flags);
	if (!report_available())
		goto out; /* A new report is being captured. */

	/* Finally match expected output to what we actually observed. */
	ret = strstr(observed.lines[0], expect[0]) &&
	      /* Access info may appear in any order. */
	      ((strstr(observed.lines[1], expect[1]) &&
		strstr(observed.lines[2], expect[2])) ||
	       (strstr(observed.lines[1], expect[2]) &&
		strstr(observed.lines[2], expect[1])));
out:
	spin_unlock_irqrestore(&observed.lock, flags);
	kfree(expect);
	return ret;
}
2631fe84fd4SMarco Elver
2647310bd1fSMarco Elver static __always_inline const struct expect_report *
__report_set_scoped(struct expect_report * r,int accesses)2657310bd1fSMarco Elver __report_set_scoped(struct expect_report *r, int accesses)
2667310bd1fSMarco Elver {
2677310bd1fSMarco Elver BUILD_BUG_ON(accesses > 3);
2687310bd1fSMarco Elver
2697310bd1fSMarco Elver if (accesses & 1)
2707310bd1fSMarco Elver r->access[0].type |= KCSAN_ACCESS_SCOPED;
2717310bd1fSMarco Elver else
2727310bd1fSMarco Elver r->access[0].type &= ~KCSAN_ACCESS_SCOPED;
2737310bd1fSMarco Elver
2747310bd1fSMarco Elver if (accesses & 2)
2757310bd1fSMarco Elver r->access[1].type |= KCSAN_ACCESS_SCOPED;
2767310bd1fSMarco Elver else
2777310bd1fSMarco Elver r->access[1].type &= ~KCSAN_ACCESS_SCOPED;
2787310bd1fSMarco Elver
2797310bd1fSMarco Elver return r;
2807310bd1fSMarco Elver }
2817310bd1fSMarco Elver
/*
 * Match @r against the observed report with every combination of the two
 * accesses marked KCSAN_ACCESS_SCOPED ("reordered") or not.
 */
__no_kcsan
static bool report_matches_any_reordered(struct expect_report *r)
{
	return __report_matches(__report_set_scoped(r, 0)) ||
	       __report_matches(__report_set_scoped(r, 1)) ||
	       __report_matches(__report_set_scoped(r, 2)) ||
	       __report_matches(__report_set_scoped(r, 3));
}

#ifdef CONFIG_KCSAN_WEAK_MEMORY
/* Due to reordering accesses, any access may appear as "(reordered)". */
#define report_matches report_matches_any_reordered
#else
#define report_matches __report_matches
#endif
2977310bd1fSMarco Elver
/* ===== Test kernels ===== */

static long test_sink;	/* Sink for values; defeats compiler optimization of reads. */
static long test_var;	/* Primary shared variable most test kernels race on. */
/* @test_array should be large enough to fall into multiple watchpoint slots. */
static long test_array[3 * PAGE_SIZE / sizeof(long)];
static struct {
	long val[8];
} test_struct;
/* NOTE(review): __data_racy presumably suppresses reports for this variable —
 * see test_kernel_data_racy_qualifier(); confirm against its test case. */
static long __data_racy test_data_racy;
static DEFINE_SEQLOCK(test_seqlock);
static DEFINE_SPINLOCK(test_spinlock);
static DEFINE_MUTEX(test_mutex);
3111fe84fd4SMarco Elver
/*
 * Helper to avoid compiler optimizing out reads, and to generate source values
 * for writes. __no_kcsan: the sink store itself must not be instrumented.
 */
__no_kcsan
static noinline void sink_value(long v) { WRITE_ONCE(test_sink, v); }
3181fe84fd4SMarco Elver
/*
 * Generates a delay and some accesses that enter the runtime but do not produce
 * data races.
 *
 * @iter: number of sink_value(READ_ONCE(...)) iterations to perform.
 */
static noinline void test_delay(int iter)
{
	while (iter--)
		sink_value(READ_ONCE(test_sink));
}
3288bc32b34SMarco Elver
/* Plain (unmarked) read of test_var. */
static noinline void test_kernel_read(void) { sink_value(test_var); }

/* Plain write; the unchecked read of test_sink only provides a changing value. */
static noinline void test_kernel_write(void)
{
	test_var = READ_ONCE_NOCHECK(test_sink) + 1;
}

/* Plain write whose value never changes. */
static noinline void test_kernel_write_nochange(void) { test_var = 42; }

/* Suffixed by value-change exception filter. */
static noinline void test_kernel_write_nochange_rcu(void) { test_var = 42; }
3401fe84fd4SMarco Elver
/* Marked (atomic) read of test_var. */
static noinline void test_kernel_read_atomic(void)
{
	sink_value(READ_ONCE(test_var));
}

/* Marked (atomic) write of test_var. */
static noinline void test_kernel_write_atomic(void)
{
	WRITE_ONCE(test_var, READ_ONCE_NOCHECK(test_sink) + 1);
}

/* Atomic read-modify-write of test_var via compiler builtin. */
static noinline void test_kernel_atomic_rmw(void)
{
	/* Use builtin, so we can set up the "bad" atomic/non-atomic scenario. */
	__atomic_fetch_add(&test_var, 1, __ATOMIC_RELAXED);
}
356bec4a247SMarco Elver
/* Entirely uninstrumented write: KCSAN sees no access from this function. */
__no_kcsan
static noinline void test_kernel_write_uninstrumented(void) { test_var++; }

/* Racy increment explicitly marked with data_race(). */
static noinline void test_kernel_data_race(void) { data_race(test_var++); }

/* Racy increment of a variable declared with the __data_racy qualifier. */
static noinline void test_kernel_data_racy_qualifier(void) { test_data_racy++; }
36331f605a3SMarco Elver
/* Assert no concurrent writers to test_var exist. */
static noinline void test_kernel_assert_writer(void)
{
	ASSERT_EXCLUSIVE_WRITER(test_var);
}

/* Assert no concurrent accesses (reads or writes) to test_var exist. */
static noinline void test_kernel_assert_access(void)
{
	ASSERT_EXCLUSIVE_ACCESS(test_var);
}
3731fe84fd4SMarco Elver
/* Bits toggled by test_kernel_change_bits(). */
#define TEST_CHANGE_BITS 0xff00ff00

/* Toggle TEST_CHANGE_BITS in test_var. */
static noinline void test_kernel_change_bits(void)
{
	if (IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS)) {
		/*
		 * Avoid race of unknown origin for this test, just pretend they
		 * are atomic.
		 */
		kcsan_nestable_atomic_begin();
		test_var ^= TEST_CHANGE_BITS;
		kcsan_nestable_atomic_end();
	} else
		WRITE_ONCE(test_var, READ_ONCE(test_var) ^ TEST_CHANGE_BITS);
}

/* Assert exclusivity of the bits the changer kernel does modify. */
static noinline void test_kernel_assert_bits_change(void)
{
	ASSERT_EXCLUSIVE_BITS(test_var, TEST_CHANGE_BITS);
}

/* Assert exclusivity of the bits the changer kernel does NOT modify. */
static noinline void test_kernel_assert_bits_nochange(void)
{
	ASSERT_EXCLUSIVE_BITS(test_var, ~TEST_CHANGE_BITS);
}
3991fe84fd4SMarco Elver
/*
 * Scoped assertions do trigger anywhere in scope. However, the report should
 * still only point at the start of the scope.
 */
static noinline void test_enter_scope(void)
{
	int x = 0;

	/* Unrelated accesses to scoped assert. */
	READ_ONCE(test_sink);
	kcsan_check_read(&x, sizeof(x));
}

/* Scoped exclusive-writer assertion covering the call to test_enter_scope(). */
static noinline void test_kernel_assert_writer_scoped(void)
{
	ASSERT_EXCLUSIVE_WRITER_SCOPED(test_var);
	test_enter_scope();
}

/* Scoped exclusive-access assertion covering the call to test_enter_scope(). */
static noinline void test_kernel_assert_access_scoped(void)
{
	ASSERT_EXCLUSIVE_ACCESS_SCOPED(test_var);
	test_enter_scope();
}
4241fe84fd4SMarco Elver
/* Read-modify-write of every element of test_array. */
static noinline void test_kernel_rmw_array(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(test_array); ++i)
		test_array[i]++;
}

/* Explicit whole-struct write check, with the value change itself unchecked. */
static noinline void test_kernel_write_struct(void)
{
	kcsan_check_write(&test_struct, sizeof(test_struct));
	kcsan_disable_current();
	test_struct.val[3]++; /* induce value change */
	kcsan_enable_current();
}

/* Instrumented write to only one member of test_struct. */
static noinline void test_kernel_write_struct_part(void)
{
	test_struct.val[3] = 42;
}

/* Zero-sized explicit read check of a struct member. */
static noinline void test_kernel_read_struct_zero_size(void)
{
	kcsan_check_read(&test_struct.val[3], 0);
}
4501fe84fd4SMarco Elver
/* Read of the jiffies counter, which is concurrently updated by the timer tick. */
static noinline void test_kernel_jiffies_reader(void)
{
	sink_value((long)jiffies);
}

/* Seqlock read-side critical section reading test_var. */
static noinline void test_kernel_seqlock_reader(void)
{
	unsigned int seq;

	do {
		seq = read_seqbegin(&test_seqlock);
		sink_value(test_var);
	} while (read_seqretry(&test_seqlock, seq));
}

/* Seqlock write-side critical section updating test_var. */
static noinline void test_kernel_seqlock_writer(void)
{
	unsigned long flags;

	write_seqlock_irqsave(&test_seqlock, flags);
	test_var++;
	write_sequnlock_irqrestore(&test_seqlock, flags);
}
4741fe84fd4SMarco Elver
/* Atomic load of test_var via compiler builtin. */
static noinline void test_kernel_atomic_builtins(void)
{
	/*
	 * Generate concurrent accesses, expecting no reports, ensuring KCSAN
	 * treats builtin atomics as actually atomic.
	 */
	__atomic_load_n(&test_var, __ATOMIC_RELAXED);
}

/* Toggle a single bit of test_var inside a nestable-atomic region. */
static noinline void test_kernel_xor_1bit(void)
{
	/* Do not report data races between the read-writes. */
	kcsan_nestable_atomic_begin();
	test_var ^= 0x10000;
	kcsan_nestable_atomic_end();
}
491d8fd74d3SMarco Elver
/*
 * Generate a lock-like test kernel: @acquire must take the flag (evaluating
 * to true on success), the critical section then increments test_var 100
 * times, and @release drops the flag. Used to exercise the instrumentation of
 * acquire/release orderings built from atomics.
 */
#define TEST_KERNEL_LOCKED(name, acquire, release)		\
	static noinline void test_kernel_##name(void)		\
	{							\
		long *flag = &test_struct.val[0];		\
		long v = 0;					\
		if (!(acquire))					\
			return;					\
		while (v++ < 100) {				\
			test_var++;				\
			barrier();				\
		}						\
		release;					\
		test_delay(10);					\
	}

/* Correct ordering: acquire on lock, release on unlock. */
TEST_KERNEL_LOCKED(with_memorder,
		   cmpxchg_acquire(flag, 0, 1) == 0,
		   smp_store_release(flag, 0));
/* Deliberately insufficient ordering (relaxed/plain). */
TEST_KERNEL_LOCKED(wrong_memorder,
		   cmpxchg_relaxed(flag, 0, 1) == 0,
		   WRITE_ONCE(*flag, 0));
/* As above, but using the compiler's atomic builtins. */
TEST_KERNEL_LOCKED(atomic_builtin_with_memorder,
		   __atomic_compare_exchange_n(flag, &v, 1, 0, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED),
		   __atomic_store_n(flag, 0, __ATOMIC_RELEASE));
TEST_KERNEL_LOCKED(atomic_builtin_wrong_memorder,
		   __atomic_compare_exchange_n(flag, &v, 1, 0, __ATOMIC_RELAXED, __ATOMIC_RELAXED),
		   __atomic_store_n(flag, 0, __ATOMIC_RELAXED));
5198bc32b34SMarco Elver
5201fe84fd4SMarco Elver /* ===== Test cases ===== */
5211fe84fd4SMarco Elver
5228bc32b34SMarco Elver /*
5238bc32b34SMarco Elver * Tests that various barriers have the expected effect on internal state. Not
5248bc32b34SMarco Elver * exhaustive on atomic_t operations. Unlike the selftest, also checks for
5258bc32b34SMarco Elver * too-strict barrier instrumentation; these can be tolerated, because it does
5268bc32b34SMarco Elver * not cause false positives, but at least we should be aware of such cases.
5278bc32b34SMarco Elver */
test_barrier_nothreads(struct kunit * test)5288bc32b34SMarco Elver static void test_barrier_nothreads(struct kunit *test)
5298bc32b34SMarco Elver {
5308bc32b34SMarco Elver #ifdef CONFIG_KCSAN_WEAK_MEMORY
5318bc32b34SMarco Elver struct kcsan_scoped_access *reorder_access = ¤t->kcsan_ctx.reorder_access;
5328bc32b34SMarco Elver #else
5338bc32b34SMarco Elver struct kcsan_scoped_access *reorder_access = NULL;
5348bc32b34SMarco Elver #endif
5358bc32b34SMarco Elver arch_spinlock_t arch_spinlock = __ARCH_SPIN_LOCK_UNLOCKED;
5368bc32b34SMarco Elver atomic_t dummy = ATOMIC_INIT(0);
5378bc32b34SMarco Elver
5388bc32b34SMarco Elver KCSAN_TEST_REQUIRES(test, reorder_access != NULL);
5398bc32b34SMarco Elver KCSAN_TEST_REQUIRES(test, IS_ENABLED(CONFIG_SMP));
5408bc32b34SMarco Elver
5418bc32b34SMarco Elver #define __KCSAN_EXPECT_BARRIER(access_type, barrier, order_before, name) \
5428bc32b34SMarco Elver do { \
5438bc32b34SMarco Elver reorder_access->type = (access_type) | KCSAN_ACCESS_SCOPED; \
5448bc32b34SMarco Elver reorder_access->size = sizeof(test_var); \
5458bc32b34SMarco Elver barrier; \
5468bc32b34SMarco Elver KUNIT_EXPECT_EQ_MSG(test, reorder_access->size, \
5478bc32b34SMarco Elver order_before ? 0 : sizeof(test_var), \
5488bc32b34SMarco Elver "improperly instrumented type=(" #access_type "): " name); \
5498bc32b34SMarco Elver } while (0)
5508bc32b34SMarco Elver #define KCSAN_EXPECT_READ_BARRIER(b, o) __KCSAN_EXPECT_BARRIER(0, b, o, #b)
5518bc32b34SMarco Elver #define KCSAN_EXPECT_WRITE_BARRIER(b, o) __KCSAN_EXPECT_BARRIER(KCSAN_ACCESS_WRITE, b, o, #b)
5528bc32b34SMarco Elver #define KCSAN_EXPECT_RW_BARRIER(b, o) __KCSAN_EXPECT_BARRIER(KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE, b, o, #b)
5538bc32b34SMarco Elver
554a70d36e6SMarco Elver /*
555a70d36e6SMarco Elver * Lockdep initialization can strengthen certain locking operations due
556a70d36e6SMarco Elver * to calling into instrumented files; "warm up" our locks.
557a70d36e6SMarco Elver */
558a70d36e6SMarco Elver spin_lock(&test_spinlock);
559a70d36e6SMarco Elver spin_unlock(&test_spinlock);
560a70d36e6SMarco Elver mutex_lock(&test_mutex);
561a70d36e6SMarco Elver mutex_unlock(&test_mutex);
562a70d36e6SMarco Elver
5638bc32b34SMarco Elver /* Force creating a valid entry in reorder_access first. */
5648bc32b34SMarco Elver test_var = 0;
5658bc32b34SMarco Elver while (test_var++ < 1000000 && reorder_access->size != sizeof(test_var))
5668bc32b34SMarco Elver __kcsan_check_read(&test_var, sizeof(test_var));
5678bc32b34SMarco Elver KUNIT_ASSERT_EQ(test, reorder_access->size, sizeof(test_var));
5688bc32b34SMarco Elver
5698bc32b34SMarco Elver kcsan_nestable_atomic_begin(); /* No watchpoints in called functions. */
5708bc32b34SMarco Elver
5718bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(mb(), true);
5728bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(wmb(), false);
5738bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(rmb(), true);
5748bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(smp_mb(), true);
5758bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(smp_wmb(), false);
5768bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(smp_rmb(), true);
5778bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(dma_wmb(), false);
5788bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(dma_rmb(), true);
5798bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(smp_mb__before_atomic(), true);
5808bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(smp_mb__after_atomic(), true);
5818bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(smp_mb__after_spinlock(), true);
5828bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(smp_store_mb(test_var, 0), true);
5838bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(smp_load_acquire(&test_var), false);
5848bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(smp_store_release(&test_var, 0), true);
5858bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(xchg(&test_var, 0), true);
5868bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(xchg_release(&test_var, 0), true);
5878bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(xchg_relaxed(&test_var, 0), false);
5888bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(cmpxchg(&test_var, 0, 0), true);
5898bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(cmpxchg_release(&test_var, 0, 0), true);
5908bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(cmpxchg_relaxed(&test_var, 0, 0), false);
5918bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(atomic_read(&dummy), false);
5928bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(atomic_read_acquire(&dummy), false);
5938bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(atomic_set(&dummy, 0), false);
5948bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(atomic_set_release(&dummy, 0), true);
5958bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(atomic_add(1, &dummy), false);
5968bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(atomic_add_return(1, &dummy), true);
5978bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(atomic_add_return_acquire(1, &dummy), false);
5988bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(atomic_add_return_release(1, &dummy), true);
5998bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(atomic_add_return_relaxed(1, &dummy), false);
6008bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(atomic_fetch_add(1, &dummy), true);
6018bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(atomic_fetch_add_acquire(1, &dummy), false);
6028bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(atomic_fetch_add_release(1, &dummy), true);
6038bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(atomic_fetch_add_relaxed(1, &dummy), false);
6048bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(test_and_set_bit(0, &test_var), true);
6058bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(test_and_clear_bit(0, &test_var), true);
6068bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(test_and_change_bit(0, &test_var), true);
6078bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(clear_bit_unlock(0, &test_var), true);
6088bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(__clear_bit_unlock(0, &test_var), true);
6098bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(arch_spin_lock(&arch_spinlock), false);
6108bc32b34SMarco Elver KCSAN_EXPECT_READ_BARRIER(arch_spin_unlock(&arch_spinlock), true);
611a70d36e6SMarco Elver KCSAN_EXPECT_READ_BARRIER(spin_lock(&test_spinlock), false);
612a70d36e6SMarco Elver KCSAN_EXPECT_READ_BARRIER(spin_unlock(&test_spinlock), true);
613a70d36e6SMarco Elver KCSAN_EXPECT_READ_BARRIER(mutex_lock(&test_mutex), false);
614a70d36e6SMarco Elver KCSAN_EXPECT_READ_BARRIER(mutex_unlock(&test_mutex), true);
6158bc32b34SMarco Elver
6168bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(mb(), true);
6178bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(wmb(), true);
6188bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(rmb(), false);
6198bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(smp_mb(), true);
6208bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(smp_wmb(), true);
6218bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(smp_rmb(), false);
6228bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(dma_wmb(), true);
6238bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(dma_rmb(), false);
6248bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(smp_mb__before_atomic(), true);
6258bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(smp_mb__after_atomic(), true);
6268bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(smp_mb__after_spinlock(), true);
6278bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(smp_store_mb(test_var, 0), true);
6288bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(smp_load_acquire(&test_var), false);
6298bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(smp_store_release(&test_var, 0), true);
6308bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(xchg(&test_var, 0), true);
6318bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(xchg_release(&test_var, 0), true);
6328bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(xchg_relaxed(&test_var, 0), false);
6338bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(cmpxchg(&test_var, 0, 0), true);
6348bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(cmpxchg_release(&test_var, 0, 0), true);
6358bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(cmpxchg_relaxed(&test_var, 0, 0), false);
6368bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(atomic_read(&dummy), false);
6378bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(atomic_read_acquire(&dummy), false);
6388bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(atomic_set(&dummy, 0), false);
6398bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(atomic_set_release(&dummy, 0), true);
6408bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(atomic_add(1, &dummy), false);
6418bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(atomic_add_return(1, &dummy), true);
6428bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(atomic_add_return_acquire(1, &dummy), false);
6438bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(atomic_add_return_release(1, &dummy), true);
6448bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(atomic_add_return_relaxed(1, &dummy), false);
6458bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(atomic_fetch_add(1, &dummy), true);
6468bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(atomic_fetch_add_acquire(1, &dummy), false);
6478bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(atomic_fetch_add_release(1, &dummy), true);
6488bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(atomic_fetch_add_relaxed(1, &dummy), false);
6498bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(test_and_set_bit(0, &test_var), true);
6508bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(test_and_clear_bit(0, &test_var), true);
6518bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(test_and_change_bit(0, &test_var), true);
6528bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(clear_bit_unlock(0, &test_var), true);
6538bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(__clear_bit_unlock(0, &test_var), true);
6548bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(arch_spin_lock(&arch_spinlock), false);
6558bc32b34SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(arch_spin_unlock(&arch_spinlock), true);
656a70d36e6SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(spin_lock(&test_spinlock), false);
657a70d36e6SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(spin_unlock(&test_spinlock), true);
658a70d36e6SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(mutex_lock(&test_mutex), false);
659a70d36e6SMarco Elver KCSAN_EXPECT_WRITE_BARRIER(mutex_unlock(&test_mutex), true);
6608bc32b34SMarco Elver
6618bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(mb(), true);
6628bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(wmb(), true);
6638bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(rmb(), true);
6648bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(smp_mb(), true);
6658bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(smp_wmb(), true);
6668bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(smp_rmb(), true);
6678bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(dma_wmb(), true);
6688bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(dma_rmb(), true);
6698bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(smp_mb__before_atomic(), true);
6708bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(smp_mb__after_atomic(), true);
6718bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(smp_mb__after_spinlock(), true);
6728bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(smp_store_mb(test_var, 0), true);
6738bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(smp_load_acquire(&test_var), false);
6748bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(smp_store_release(&test_var, 0), true);
6758bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(xchg(&test_var, 0), true);
6768bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(xchg_release(&test_var, 0), true);
6778bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(xchg_relaxed(&test_var, 0), false);
6788bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(cmpxchg(&test_var, 0, 0), true);
6798bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(cmpxchg_release(&test_var, 0, 0), true);
6808bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(cmpxchg_relaxed(&test_var, 0, 0), false);
6818bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(atomic_read(&dummy), false);
6828bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(atomic_read_acquire(&dummy), false);
6838bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(atomic_set(&dummy, 0), false);
6848bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(atomic_set_release(&dummy, 0), true);
6858bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(atomic_add(1, &dummy), false);
6868bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(atomic_add_return(1, &dummy), true);
6878bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(atomic_add_return_acquire(1, &dummy), false);
6888bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(atomic_add_return_release(1, &dummy), true);
6898bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(atomic_add_return_relaxed(1, &dummy), false);
6908bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(atomic_fetch_add(1, &dummy), true);
6918bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(atomic_fetch_add_acquire(1, &dummy), false);
6928bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(atomic_fetch_add_release(1, &dummy), true);
6938bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(atomic_fetch_add_relaxed(1, &dummy), false);
6948bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(test_and_set_bit(0, &test_var), true);
6958bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(test_and_clear_bit(0, &test_var), true);
6968bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(test_and_change_bit(0, &test_var), true);
6978bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(clear_bit_unlock(0, &test_var), true);
6988bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(__clear_bit_unlock(0, &test_var), true);
6998bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(arch_spin_lock(&arch_spinlock), false);
7008bc32b34SMarco Elver KCSAN_EXPECT_RW_BARRIER(arch_spin_unlock(&arch_spinlock), true);
701a70d36e6SMarco Elver KCSAN_EXPECT_RW_BARRIER(spin_lock(&test_spinlock), false);
702a70d36e6SMarco Elver KCSAN_EXPECT_RW_BARRIER(spin_unlock(&test_spinlock), true);
703a70d36e6SMarco Elver KCSAN_EXPECT_RW_BARRIER(mutex_lock(&test_mutex), false);
704a70d36e6SMarco Elver KCSAN_EXPECT_RW_BARRIER(mutex_unlock(&test_mutex), true);
705247dbcdbSMatthew Wilcox (Oracle) KCSAN_EXPECT_READ_BARRIER(xor_unlock_is_negative_byte(1, &test_var), true);
706247dbcdbSMatthew Wilcox (Oracle) KCSAN_EXPECT_WRITE_BARRIER(xor_unlock_is_negative_byte(1, &test_var), true);
707247dbcdbSMatthew Wilcox (Oracle) KCSAN_EXPECT_RW_BARRIER(xor_unlock_is_negative_byte(1, &test_var), true);
7088bc32b34SMarco Elver kcsan_nestable_atomic_end();
7098bc32b34SMarco Elver }
7108bc32b34SMarco Elver
7111fe84fd4SMarco Elver /* Simple test with normal data race. */
7121fe84fd4SMarco Elver __no_kcsan
test_basic(struct kunit * test)7131fe84fd4SMarco Elver static void test_basic(struct kunit *test)
7141fe84fd4SMarco Elver {
7157310bd1fSMarco Elver struct expect_report expect = {
7161fe84fd4SMarco Elver .access = {
7171fe84fd4SMarco Elver { test_kernel_write, &test_var, sizeof(test_var), KCSAN_ACCESS_WRITE },
7181fe84fd4SMarco Elver { test_kernel_read, &test_var, sizeof(test_var), 0 },
7191fe84fd4SMarco Elver },
7201fe84fd4SMarco Elver };
7217310bd1fSMarco Elver struct expect_report never = {
7221fe84fd4SMarco Elver .access = {
7231fe84fd4SMarco Elver { test_kernel_read, &test_var, sizeof(test_var), 0 },
7241fe84fd4SMarco Elver { test_kernel_read, &test_var, sizeof(test_var), 0 },
7251fe84fd4SMarco Elver },
7261fe84fd4SMarco Elver };
7271fe84fd4SMarco Elver bool match_expect = false;
7281fe84fd4SMarco Elver bool match_never = false;
7291fe84fd4SMarco Elver
7301fe84fd4SMarco Elver begin_test_checks(test_kernel_write, test_kernel_read);
7311fe84fd4SMarco Elver do {
7321fe84fd4SMarco Elver match_expect |= report_matches(&expect);
7331fe84fd4SMarco Elver match_never = report_matches(&never);
7341fe84fd4SMarco Elver } while (!end_test_checks(match_never));
7351fe84fd4SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect);
7361fe84fd4SMarco Elver KUNIT_EXPECT_FALSE(test, match_never);
7371fe84fd4SMarco Elver }
7381fe84fd4SMarco Elver
7391fe84fd4SMarco Elver /*
7401fe84fd4SMarco Elver * Stress KCSAN with lots of concurrent races on different addresses until
7411fe84fd4SMarco Elver * timeout.
7421fe84fd4SMarco Elver */
7431fe84fd4SMarco Elver __no_kcsan
test_concurrent_races(struct kunit * test)7441fe84fd4SMarco Elver static void test_concurrent_races(struct kunit *test)
7451fe84fd4SMarco Elver {
7467310bd1fSMarco Elver struct expect_report expect = {
7471fe84fd4SMarco Elver .access = {
7481fe84fd4SMarco Elver /* NULL will match any address. */
749bec4a247SMarco Elver { test_kernel_rmw_array, NULL, 0, __KCSAN_ACCESS_RW(KCSAN_ACCESS_WRITE) },
750bec4a247SMarco Elver { test_kernel_rmw_array, NULL, 0, __KCSAN_ACCESS_RW(0) },
7511fe84fd4SMarco Elver },
7521fe84fd4SMarco Elver };
7537310bd1fSMarco Elver struct expect_report never = {
7541fe84fd4SMarco Elver .access = {
7551fe84fd4SMarco Elver { test_kernel_rmw_array, NULL, 0, 0 },
7561fe84fd4SMarco Elver { test_kernel_rmw_array, NULL, 0, 0 },
7571fe84fd4SMarco Elver },
7581fe84fd4SMarco Elver };
7591fe84fd4SMarco Elver bool match_expect = false;
7601fe84fd4SMarco Elver bool match_never = false;
7611fe84fd4SMarco Elver
7621fe84fd4SMarco Elver begin_test_checks(test_kernel_rmw_array, test_kernel_rmw_array);
7631fe84fd4SMarco Elver do {
7641fe84fd4SMarco Elver match_expect |= report_matches(&expect);
7651fe84fd4SMarco Elver match_never |= report_matches(&never);
7661fe84fd4SMarco Elver } while (!end_test_checks(false));
7671fe84fd4SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect); /* Sanity check matches exist. */
7681fe84fd4SMarco Elver KUNIT_EXPECT_FALSE(test, match_never);
7691fe84fd4SMarco Elver }
7701fe84fd4SMarco Elver
/* Test the KCSAN_REPORT_VALUE_CHANGE_ONLY option. */
__no_kcsan
static void test_novalue_change(struct kunit *test)
{
	/* Race: value-preserving write vs. plain read. */
	struct expect_report expect_rw = {
		.access = {
			{ test_kernel_write_nochange, &test_var, sizeof(test_var), KCSAN_ACCESS_WRITE },
			{ test_kernel_read, &test_var, sizeof(test_var), 0 },
		},
	};
	/* Race: two value-preserving writes. */
	struct expect_report expect_ww = {
		.access = {
			{ test_kernel_write_nochange, &test_var, sizeof(test_var), KCSAN_ACCESS_WRITE },
			{ test_kernel_write_nochange, &test_var, sizeof(test_var), KCSAN_ACCESS_WRITE },
		},
	};
	bool match_expect = false;

	test_kernel_write_nochange(); /* Reset value. */
	begin_test_checks(test_kernel_write_nochange, test_kernel_read);
	do {
		match_expect = report_matches(&expect_rw) || report_matches(&expect_ww);
	} while (!end_test_checks(match_expect));
	/*
	 * With VALUE_CHANGE_ONLY enabled, races whose writes do not change the
	 * value are expected to be filtered out; otherwise they are reported.
	 */
	if (IS_ENABLED(CONFIG_KCSAN_REPORT_VALUE_CHANGE_ONLY))
		KUNIT_EXPECT_FALSE(test, match_expect);
	else
		KUNIT_EXPECT_TRUE(test, match_expect);
}
7991fe84fd4SMarco Elver
8001fe84fd4SMarco Elver /*
8011fe84fd4SMarco Elver * Test that the rules where the KCSAN_REPORT_VALUE_CHANGE_ONLY option should
8021fe84fd4SMarco Elver * never apply work.
8031fe84fd4SMarco Elver */
8041fe84fd4SMarco Elver __no_kcsan
test_novalue_change_exception(struct kunit * test)8051fe84fd4SMarco Elver static void test_novalue_change_exception(struct kunit *test)
8061fe84fd4SMarco Elver {
8077310bd1fSMarco Elver struct expect_report expect_rw = {
8081fe84fd4SMarco Elver .access = {
8091fe84fd4SMarco Elver { test_kernel_write_nochange_rcu, &test_var, sizeof(test_var), KCSAN_ACCESS_WRITE },
8101fe84fd4SMarco Elver { test_kernel_read, &test_var, sizeof(test_var), 0 },
8111fe84fd4SMarco Elver },
8121fe84fd4SMarco Elver };
8137310bd1fSMarco Elver struct expect_report expect_ww = {
814ade3a58bSMarco Elver .access = {
815ade3a58bSMarco Elver { test_kernel_write_nochange_rcu, &test_var, sizeof(test_var), KCSAN_ACCESS_WRITE },
816ade3a58bSMarco Elver { test_kernel_write_nochange_rcu, &test_var, sizeof(test_var), KCSAN_ACCESS_WRITE },
817ade3a58bSMarco Elver },
818ade3a58bSMarco Elver };
8191fe84fd4SMarco Elver bool match_expect = false;
8201fe84fd4SMarco Elver
821ade3a58bSMarco Elver test_kernel_write_nochange_rcu(); /* Reset value. */
8221fe84fd4SMarco Elver begin_test_checks(test_kernel_write_nochange_rcu, test_kernel_read);
8231fe84fd4SMarco Elver do {
824ade3a58bSMarco Elver match_expect = report_matches(&expect_rw) || report_matches(&expect_ww);
8251fe84fd4SMarco Elver } while (!end_test_checks(match_expect));
8261fe84fd4SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect);
8271fe84fd4SMarco Elver }
8281fe84fd4SMarco Elver
8291fe84fd4SMarco Elver /* Test that data races of unknown origin are reported. */
8301fe84fd4SMarco Elver __no_kcsan
test_unknown_origin(struct kunit * test)8311fe84fd4SMarco Elver static void test_unknown_origin(struct kunit *test)
8321fe84fd4SMarco Elver {
8337310bd1fSMarco Elver struct expect_report expect = {
8341fe84fd4SMarco Elver .access = {
8351fe84fd4SMarco Elver { test_kernel_read, &test_var, sizeof(test_var), 0 },
8361fe84fd4SMarco Elver { NULL },
8371fe84fd4SMarco Elver },
8381fe84fd4SMarco Elver };
8391fe84fd4SMarco Elver bool match_expect = false;
8401fe84fd4SMarco Elver
8411fe84fd4SMarco Elver begin_test_checks(test_kernel_write_uninstrumented, test_kernel_read);
8421fe84fd4SMarco Elver do {
8431fe84fd4SMarco Elver match_expect = report_matches(&expect);
8441fe84fd4SMarco Elver } while (!end_test_checks(match_expect));
8451fe84fd4SMarco Elver if (IS_ENABLED(CONFIG_KCSAN_REPORT_RACE_UNKNOWN_ORIGIN))
8461fe84fd4SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect);
8471fe84fd4SMarco Elver else
8481fe84fd4SMarco Elver KUNIT_EXPECT_FALSE(test, match_expect);
8491fe84fd4SMarco Elver }
8501fe84fd4SMarco Elver
/* Test KCSAN_ASSUME_PLAIN_WRITES_ATOMIC if it is selected. */
__no_kcsan
static void test_write_write_assume_atomic(struct kunit *test)
{
	/* Race: two plain (word-sized) writes to the same variable. */
	struct expect_report expect = {
		.access = {
			{ test_kernel_write, &test_var, sizeof(test_var), KCSAN_ACCESS_WRITE },
			{ test_kernel_write, &test_var, sizeof(test_var), KCSAN_ACCESS_WRITE },
		},
	};
	bool match_expect = false;

	begin_test_checks(test_kernel_write, test_kernel_write);
	do {
		sink_value(READ_ONCE(test_var)); /* induce value-change */
		match_expect = report_matches(&expect);
	} while (!end_test_checks(match_expect));
	/*
	 * If plain writes are assumed atomic, the write/write race must not be
	 * reported; otherwise it must be.
	 */
	if (IS_ENABLED(CONFIG_KCSAN_ASSUME_PLAIN_WRITES_ATOMIC))
		KUNIT_EXPECT_FALSE(test, match_expect);
	else
		KUNIT_EXPECT_TRUE(test, match_expect);
}
8731fe84fd4SMarco Elver
8741fe84fd4SMarco Elver /*
8751fe84fd4SMarco Elver * Test that data races with writes larger than word-size are always reported,
8761fe84fd4SMarco Elver * even if KCSAN_ASSUME_PLAIN_WRITES_ATOMIC is selected.
8771fe84fd4SMarco Elver */
8781fe84fd4SMarco Elver __no_kcsan
test_write_write_struct(struct kunit * test)8791fe84fd4SMarco Elver static void test_write_write_struct(struct kunit *test)
8801fe84fd4SMarco Elver {
8817310bd1fSMarco Elver struct expect_report expect = {
8821fe84fd4SMarco Elver .access = {
8831fe84fd4SMarco Elver { test_kernel_write_struct, &test_struct, sizeof(test_struct), KCSAN_ACCESS_WRITE },
8841fe84fd4SMarco Elver { test_kernel_write_struct, &test_struct, sizeof(test_struct), KCSAN_ACCESS_WRITE },
8851fe84fd4SMarco Elver },
8861fe84fd4SMarco Elver };
8871fe84fd4SMarco Elver bool match_expect = false;
8881fe84fd4SMarco Elver
8891fe84fd4SMarco Elver begin_test_checks(test_kernel_write_struct, test_kernel_write_struct);
8901fe84fd4SMarco Elver do {
8911fe84fd4SMarco Elver match_expect = report_matches(&expect);
8921fe84fd4SMarco Elver } while (!end_test_checks(match_expect));
8931fe84fd4SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect);
8941fe84fd4SMarco Elver }
8951fe84fd4SMarco Elver
8961fe84fd4SMarco Elver /*
8971fe84fd4SMarco Elver * Test that data races where only one write is larger than word-size are always
8981fe84fd4SMarco Elver * reported, even if KCSAN_ASSUME_PLAIN_WRITES_ATOMIC is selected.
8991fe84fd4SMarco Elver */
9001fe84fd4SMarco Elver __no_kcsan
test_write_write_struct_part(struct kunit * test)9011fe84fd4SMarco Elver static void test_write_write_struct_part(struct kunit *test)
9021fe84fd4SMarco Elver {
9037310bd1fSMarco Elver struct expect_report expect = {
9041fe84fd4SMarco Elver .access = {
9051fe84fd4SMarco Elver { test_kernel_write_struct, &test_struct, sizeof(test_struct), KCSAN_ACCESS_WRITE },
9061fe84fd4SMarco Elver { test_kernel_write_struct_part, &test_struct.val[3], sizeof(test_struct.val[3]), KCSAN_ACCESS_WRITE },
9071fe84fd4SMarco Elver },
9081fe84fd4SMarco Elver };
9091fe84fd4SMarco Elver bool match_expect = false;
9101fe84fd4SMarco Elver
9111fe84fd4SMarco Elver begin_test_checks(test_kernel_write_struct, test_kernel_write_struct_part);
9121fe84fd4SMarco Elver do {
9131fe84fd4SMarco Elver match_expect = report_matches(&expect);
9141fe84fd4SMarco Elver } while (!end_test_checks(match_expect));
9151fe84fd4SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect);
9161fe84fd4SMarco Elver }
9171fe84fd4SMarco Elver
9181fe84fd4SMarco Elver /* Test that races with atomic accesses never result in reports. */
9191fe84fd4SMarco Elver __no_kcsan
test_read_atomic_write_atomic(struct kunit * test)9201fe84fd4SMarco Elver static void test_read_atomic_write_atomic(struct kunit *test)
9211fe84fd4SMarco Elver {
9221fe84fd4SMarco Elver bool match_never = false;
9231fe84fd4SMarco Elver
9241fe84fd4SMarco Elver begin_test_checks(test_kernel_read_atomic, test_kernel_write_atomic);
9251fe84fd4SMarco Elver do {
9261fe84fd4SMarco Elver match_never = report_available();
9271fe84fd4SMarco Elver } while (!end_test_checks(match_never));
9281fe84fd4SMarco Elver KUNIT_EXPECT_FALSE(test, match_never);
9291fe84fd4SMarco Elver }
9301fe84fd4SMarco Elver
/* Test that a race with an atomic and plain access result in reports. */
__no_kcsan
static void test_read_plain_atomic_write(struct kunit *test)
{
	/* Race: plain read vs. marked (atomic) write. */
	struct expect_report expect = {
		.access = {
			{ test_kernel_read, &test_var, sizeof(test_var), 0 },
			{ test_kernel_write_atomic, &test_var, sizeof(test_var), KCSAN_ACCESS_WRITE | KCSAN_ACCESS_ATOMIC },
		},
	};
	bool match_expect = false;

	/* Skip if atomics are ignored: no report would be generated at all. */
	KCSAN_TEST_REQUIRES(test, !IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS));

	begin_test_checks(test_kernel_read, test_kernel_write_atomic);
	do {
		match_expect = report_matches(&expect);
	} while (!end_test_checks(match_expect));
	KUNIT_EXPECT_TRUE(test, match_expect);
}
9511fe84fd4SMarco Elver
952bec4a247SMarco Elver /* Test that atomic RMWs generate correct report. */
953bec4a247SMarco Elver __no_kcsan
test_read_plain_atomic_rmw(struct kunit * test)954bec4a247SMarco Elver static void test_read_plain_atomic_rmw(struct kunit *test)
955bec4a247SMarco Elver {
9567310bd1fSMarco Elver struct expect_report expect = {
957bec4a247SMarco Elver .access = {
958bec4a247SMarco Elver { test_kernel_read, &test_var, sizeof(test_var), 0 },
959bec4a247SMarco Elver { test_kernel_atomic_rmw, &test_var, sizeof(test_var),
960bec4a247SMarco Elver KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE | KCSAN_ACCESS_ATOMIC },
961bec4a247SMarco Elver },
962bec4a247SMarco Elver };
963bec4a247SMarco Elver bool match_expect = false;
964bec4a247SMarco Elver
96580804284SMarco Elver KCSAN_TEST_REQUIRES(test, !IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS));
966bec4a247SMarco Elver
967bec4a247SMarco Elver begin_test_checks(test_kernel_read, test_kernel_atomic_rmw);
968bec4a247SMarco Elver do {
969bec4a247SMarco Elver match_expect = report_matches(&expect);
970bec4a247SMarco Elver } while (!end_test_checks(match_expect));
971bec4a247SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect);
972bec4a247SMarco Elver }
973bec4a247SMarco Elver
/* Zero-sized accesses should never cause data race reports. */
__no_kcsan
static void test_zero_size_access(struct kunit *test)
{
	/* Sanity-check expectation: the write/write race is still reported. */
	struct expect_report expect = {
		.access = {
			{ test_kernel_write_struct, &test_struct, sizeof(test_struct), KCSAN_ACCESS_WRITE },
			{ test_kernel_write_struct, &test_struct, sizeof(test_struct), KCSAN_ACCESS_WRITE },
		},
	};
	/* The zero-sized read must never appear in a report. */
	struct expect_report never = {
		.access = {
			{ test_kernel_write_struct, &test_struct, sizeof(test_struct), KCSAN_ACCESS_WRITE },
			{ test_kernel_read_struct_zero_size, &test_struct.val[3], 0, 0 },
		},
	};
	bool match_expect = false;
	bool match_never = false;

	begin_test_checks(test_kernel_write_struct, test_kernel_read_struct_zero_size);
	do {
		match_expect |= report_matches(&expect);
		match_never = report_matches(&never);
	} while (!end_test_checks(match_never));
	KUNIT_EXPECT_TRUE(test, match_expect); /* Sanity check. */
	KUNIT_EXPECT_FALSE(test, match_never);
}
10011fe84fd4SMarco Elver
10021fe84fd4SMarco Elver /* Test the data_race() macro. */
10031fe84fd4SMarco Elver __no_kcsan
test_data_race(struct kunit * test)10041fe84fd4SMarco Elver static void test_data_race(struct kunit *test)
10051fe84fd4SMarco Elver {
10061fe84fd4SMarco Elver bool match_never = false;
10071fe84fd4SMarco Elver
10081fe84fd4SMarco Elver begin_test_checks(test_kernel_data_race, test_kernel_data_race);
10091fe84fd4SMarco Elver do {
10101fe84fd4SMarco Elver match_never = report_available();
10111fe84fd4SMarco Elver } while (!end_test_checks(match_never));
10121fe84fd4SMarco Elver KUNIT_EXPECT_FALSE(test, match_never);
10131fe84fd4SMarco Elver }
10141fe84fd4SMarco Elver
101531f605a3SMarco Elver /* Test the __data_racy type qualifier. */
101631f605a3SMarco Elver __no_kcsan
test_data_racy_qualifier(struct kunit * test)101731f605a3SMarco Elver static void test_data_racy_qualifier(struct kunit *test)
101831f605a3SMarco Elver {
101931f605a3SMarco Elver bool match_never = false;
102031f605a3SMarco Elver
102131f605a3SMarco Elver begin_test_checks(test_kernel_data_racy_qualifier, test_kernel_data_racy_qualifier);
102231f605a3SMarco Elver do {
102331f605a3SMarco Elver match_never = report_available();
102431f605a3SMarco Elver } while (!end_test_checks(match_never));
102531f605a3SMarco Elver KUNIT_EXPECT_FALSE(test, match_never);
102631f605a3SMarco Elver }
102731f605a3SMarco Elver
10281fe84fd4SMarco Elver __no_kcsan
test_assert_exclusive_writer(struct kunit * test)10291fe84fd4SMarco Elver static void test_assert_exclusive_writer(struct kunit *test)
10301fe84fd4SMarco Elver {
10317310bd1fSMarco Elver struct expect_report expect = {
10321fe84fd4SMarco Elver .access = {
10331fe84fd4SMarco Elver { test_kernel_assert_writer, &test_var, sizeof(test_var), KCSAN_ACCESS_ASSERT },
10341fe84fd4SMarco Elver { test_kernel_write_nochange, &test_var, sizeof(test_var), KCSAN_ACCESS_WRITE },
10351fe84fd4SMarco Elver },
10361fe84fd4SMarco Elver };
10371fe84fd4SMarco Elver bool match_expect = false;
10381fe84fd4SMarco Elver
10391fe84fd4SMarco Elver begin_test_checks(test_kernel_assert_writer, test_kernel_write_nochange);
10401fe84fd4SMarco Elver do {
10411fe84fd4SMarco Elver match_expect = report_matches(&expect);
10421fe84fd4SMarco Elver } while (!end_test_checks(match_expect));
10431fe84fd4SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect);
10441fe84fd4SMarco Elver }
10451fe84fd4SMarco Elver
10461fe84fd4SMarco Elver __no_kcsan
test_assert_exclusive_access(struct kunit * test)10471fe84fd4SMarco Elver static void test_assert_exclusive_access(struct kunit *test)
10481fe84fd4SMarco Elver {
10497310bd1fSMarco Elver struct expect_report expect = {
10501fe84fd4SMarco Elver .access = {
10511fe84fd4SMarco Elver { test_kernel_assert_access, &test_var, sizeof(test_var), KCSAN_ACCESS_ASSERT | KCSAN_ACCESS_WRITE },
10521fe84fd4SMarco Elver { test_kernel_read, &test_var, sizeof(test_var), 0 },
10531fe84fd4SMarco Elver },
10541fe84fd4SMarco Elver };
10551fe84fd4SMarco Elver bool match_expect = false;
10561fe84fd4SMarco Elver
10571fe84fd4SMarco Elver begin_test_checks(test_kernel_assert_access, test_kernel_read);
10581fe84fd4SMarco Elver do {
10591fe84fd4SMarco Elver match_expect = report_matches(&expect);
10601fe84fd4SMarco Elver } while (!end_test_checks(match_expect));
10611fe84fd4SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect);
10621fe84fd4SMarco Elver }
10631fe84fd4SMarco Elver
__no_kcsan
static void test_assert_exclusive_access_writer(struct kunit *test)
{
	/* Expected: exclusive-access assertion vs. exclusive-writer assertion. */
	struct expect_report expect_access_writer = {
		.access = {
			{ test_kernel_assert_access, &test_var, sizeof(test_var), KCSAN_ACCESS_ASSERT | KCSAN_ACCESS_WRITE },
			{ test_kernel_assert_writer, &test_var, sizeof(test_var), KCSAN_ACCESS_ASSERT },
		},
	};
	/* Expected: two exclusive-access assertions against each other. */
	struct expect_report expect_access_access = {
		.access = {
			{ test_kernel_assert_access, &test_var, sizeof(test_var), KCSAN_ACCESS_ASSERT | KCSAN_ACCESS_WRITE },
			{ test_kernel_assert_access, &test_var, sizeof(test_var), KCSAN_ACCESS_ASSERT | KCSAN_ACCESS_WRITE },
		},
	};
	/* Never expected: two exclusive-writer assertions (both are reads). */
	struct expect_report never = {
		.access = {
			{ test_kernel_assert_writer, &test_var, sizeof(test_var), KCSAN_ACCESS_ASSERT },
			{ test_kernel_assert_writer, &test_var, sizeof(test_var), KCSAN_ACCESS_ASSERT },
		},
	};
	bool match_expect_access_writer = false;
	bool match_expect_access_access = false;
	bool match_never = false;

	begin_test_checks(test_kernel_assert_access, test_kernel_assert_writer);
	do {
		match_expect_access_writer |= report_matches(&expect_access_writer);
		match_expect_access_access |= report_matches(&expect_access_access);
		match_never |= report_matches(&never);
	} while (!end_test_checks(match_never));
	KUNIT_EXPECT_TRUE(test, match_expect_access_writer);
	KUNIT_EXPECT_TRUE(test, match_expect_access_access);
	KUNIT_EXPECT_FALSE(test, match_never);
}
10991fe84fd4SMarco Elver
11001fe84fd4SMarco Elver __no_kcsan
test_assert_exclusive_bits_change(struct kunit * test)11011fe84fd4SMarco Elver static void test_assert_exclusive_bits_change(struct kunit *test)
11021fe84fd4SMarco Elver {
11037310bd1fSMarco Elver struct expect_report expect = {
11041fe84fd4SMarco Elver .access = {
11051fe84fd4SMarco Elver { test_kernel_assert_bits_change, &test_var, sizeof(test_var), KCSAN_ACCESS_ASSERT },
11061fe84fd4SMarco Elver { test_kernel_change_bits, &test_var, sizeof(test_var),
11071fe84fd4SMarco Elver KCSAN_ACCESS_WRITE | (IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS) ? 0 : KCSAN_ACCESS_ATOMIC) },
11081fe84fd4SMarco Elver },
11091fe84fd4SMarco Elver };
11101fe84fd4SMarco Elver bool match_expect = false;
11111fe84fd4SMarco Elver
11121fe84fd4SMarco Elver begin_test_checks(test_kernel_assert_bits_change, test_kernel_change_bits);
11131fe84fd4SMarco Elver do {
11141fe84fd4SMarco Elver match_expect = report_matches(&expect);
11151fe84fd4SMarco Elver } while (!end_test_checks(match_expect));
11161fe84fd4SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect);
11171fe84fd4SMarco Elver }
11181fe84fd4SMarco Elver
11191fe84fd4SMarco Elver __no_kcsan
test_assert_exclusive_bits_nochange(struct kunit * test)11201fe84fd4SMarco Elver static void test_assert_exclusive_bits_nochange(struct kunit *test)
11211fe84fd4SMarco Elver {
11221fe84fd4SMarco Elver bool match_never = false;
11231fe84fd4SMarco Elver
11241fe84fd4SMarco Elver begin_test_checks(test_kernel_assert_bits_nochange, test_kernel_change_bits);
11251fe84fd4SMarco Elver do {
11261fe84fd4SMarco Elver match_never = report_available();
11271fe84fd4SMarco Elver } while (!end_test_checks(match_never));
11281fe84fd4SMarco Elver KUNIT_EXPECT_FALSE(test, match_never);
11291fe84fd4SMarco Elver }
11301fe84fd4SMarco Elver
11311fe84fd4SMarco Elver __no_kcsan
test_assert_exclusive_writer_scoped(struct kunit * test)11321fe84fd4SMarco Elver static void test_assert_exclusive_writer_scoped(struct kunit *test)
11331fe84fd4SMarco Elver {
11347310bd1fSMarco Elver struct expect_report expect_start = {
11351fe84fd4SMarco Elver .access = {
11361fe84fd4SMarco Elver { test_kernel_assert_writer_scoped, &test_var, sizeof(test_var), KCSAN_ACCESS_ASSERT | KCSAN_ACCESS_SCOPED },
11371fe84fd4SMarco Elver { test_kernel_write_nochange, &test_var, sizeof(test_var), KCSAN_ACCESS_WRITE },
11381fe84fd4SMarco Elver },
11391fe84fd4SMarco Elver };
11407310bd1fSMarco Elver struct expect_report expect_inscope = {
11411fe84fd4SMarco Elver .access = {
11421fe84fd4SMarco Elver { test_enter_scope, &test_var, sizeof(test_var), KCSAN_ACCESS_ASSERT | KCSAN_ACCESS_SCOPED },
11431fe84fd4SMarco Elver { test_kernel_write_nochange, &test_var, sizeof(test_var), KCSAN_ACCESS_WRITE },
11441fe84fd4SMarco Elver },
11451fe84fd4SMarco Elver };
11461fe84fd4SMarco Elver bool match_expect_start = false;
11476c65eb75SMarco Elver bool match_expect_inscope = false;
11481fe84fd4SMarco Elver
11491fe84fd4SMarco Elver begin_test_checks(test_kernel_assert_writer_scoped, test_kernel_write_nochange);
11501fe84fd4SMarco Elver do {
11511fe84fd4SMarco Elver match_expect_start |= report_matches(&expect_start);
11526c65eb75SMarco Elver match_expect_inscope |= report_matches(&expect_inscope);
11536c65eb75SMarco Elver } while (!end_test_checks(match_expect_inscope));
11541fe84fd4SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect_start);
11556c65eb75SMarco Elver KUNIT_EXPECT_FALSE(test, match_expect_inscope);
11561fe84fd4SMarco Elver }
11571fe84fd4SMarco Elver
11581fe84fd4SMarco Elver __no_kcsan
test_assert_exclusive_access_scoped(struct kunit * test)11591fe84fd4SMarco Elver static void test_assert_exclusive_access_scoped(struct kunit *test)
11601fe84fd4SMarco Elver {
11617310bd1fSMarco Elver struct expect_report expect_start1 = {
11621fe84fd4SMarco Elver .access = {
11631fe84fd4SMarco Elver { test_kernel_assert_access_scoped, &test_var, sizeof(test_var), KCSAN_ACCESS_ASSERT | KCSAN_ACCESS_WRITE | KCSAN_ACCESS_SCOPED },
11641fe84fd4SMarco Elver { test_kernel_read, &test_var, sizeof(test_var), 0 },
11651fe84fd4SMarco Elver },
11661fe84fd4SMarco Elver };
11677310bd1fSMarco Elver struct expect_report expect_start2 = {
11681fe84fd4SMarco Elver .access = { expect_start1.access[0], expect_start1.access[0] },
11691fe84fd4SMarco Elver };
11707310bd1fSMarco Elver struct expect_report expect_inscope = {
11711fe84fd4SMarco Elver .access = {
11721fe84fd4SMarco Elver { test_enter_scope, &test_var, sizeof(test_var), KCSAN_ACCESS_ASSERT | KCSAN_ACCESS_WRITE | KCSAN_ACCESS_SCOPED },
11731fe84fd4SMarco Elver { test_kernel_read, &test_var, sizeof(test_var), 0 },
11741fe84fd4SMarco Elver },
11751fe84fd4SMarco Elver };
11761fe84fd4SMarco Elver bool match_expect_start = false;
11771fe84fd4SMarco Elver bool match_expect_inscope = false;
11781fe84fd4SMarco Elver
11791fe84fd4SMarco Elver begin_test_checks(test_kernel_assert_access_scoped, test_kernel_read);
11801fe84fd4SMarco Elver end_time += msecs_to_jiffies(1000); /* This test requires a bit more time. */
11811fe84fd4SMarco Elver do {
11821fe84fd4SMarco Elver match_expect_start |= report_matches(&expect_start1) || report_matches(&expect_start2);
11831fe84fd4SMarco Elver match_expect_inscope |= report_matches(&expect_inscope);
11846c65eb75SMarco Elver } while (!end_test_checks(match_expect_inscope));
11851fe84fd4SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect_start);
11866c65eb75SMarco Elver KUNIT_EXPECT_FALSE(test, match_expect_inscope);
11871fe84fd4SMarco Elver }
11881fe84fd4SMarco Elver
118956b031f0SMarco Elver /*
119056b031f0SMarco Elver * jiffies is special (declared to be volatile) and its accesses are typically
119156b031f0SMarco Elver * not marked; this test ensures that the compiler nor KCSAN gets confused about
119256b031f0SMarco Elver * jiffies's declaration on different architectures.
119356b031f0SMarco Elver */
119456b031f0SMarco Elver __no_kcsan
test_jiffies_noreport(struct kunit * test)119556b031f0SMarco Elver static void test_jiffies_noreport(struct kunit *test)
119656b031f0SMarco Elver {
119756b031f0SMarco Elver bool match_never = false;
119856b031f0SMarco Elver
119956b031f0SMarco Elver begin_test_checks(test_kernel_jiffies_reader, test_kernel_jiffies_reader);
120056b031f0SMarco Elver do {
120156b031f0SMarco Elver match_never = report_available();
120256b031f0SMarco Elver } while (!end_test_checks(match_never));
120356b031f0SMarco Elver KUNIT_EXPECT_FALSE(test, match_never);
120456b031f0SMarco Elver }
120556b031f0SMarco Elver
12061fe84fd4SMarco Elver /* Test that racing accesses in seqlock critical sections are not reported. */
12071fe84fd4SMarco Elver __no_kcsan
test_seqlock_noreport(struct kunit * test)12081fe84fd4SMarco Elver static void test_seqlock_noreport(struct kunit *test)
12091fe84fd4SMarco Elver {
12101fe84fd4SMarco Elver bool match_never = false;
12111fe84fd4SMarco Elver
12121fe84fd4SMarco Elver begin_test_checks(test_kernel_seqlock_reader, test_kernel_seqlock_writer);
12131fe84fd4SMarco Elver do {
12141fe84fd4SMarco Elver match_never = report_available();
12151fe84fd4SMarco Elver } while (!end_test_checks(match_never));
12161fe84fd4SMarco Elver KUNIT_EXPECT_FALSE(test, match_never);
12171fe84fd4SMarco Elver }
12181fe84fd4SMarco Elver
/*
 * Test atomic builtins work and required instrumentation functions exist. We
 * also test that KCSAN understands they're atomic by racing with them via
 * test_kernel_atomic_builtins(), and expect no reports.
 *
 * The atomic builtins _SHOULD NOT_ be used in normal kernel code!
 */
static void test_atomic_builtins(struct kunit *test)
{
	bool match_never = false;

	begin_test_checks(test_kernel_atomic_builtins, test_kernel_atomic_builtins);
	do {
		long tmp;

		/* The builtin accesses below must themselves be checked by KCSAN. */
		kcsan_enable_current();

		__atomic_store_n(&test_var, 42L, __ATOMIC_RELAXED);
		KUNIT_EXPECT_EQ(test, 42L, __atomic_load_n(&test_var, __ATOMIC_RELAXED));

		/* Exchange returns the previous value. */
		KUNIT_EXPECT_EQ(test, 42L, __atomic_exchange_n(&test_var, 20, __ATOMIC_RELAXED));
		KUNIT_EXPECT_EQ(test, 20L, test_var);

		tmp = 20L;
		/* Successful CAS: test_var (20) == tmp (20), so 20 -> 30; tmp unchanged. */
		KUNIT_EXPECT_TRUE(test, __atomic_compare_exchange_n(&test_var, &tmp, 30L,
								    0, __ATOMIC_RELAXED,
								    __ATOMIC_RELAXED));
		KUNIT_EXPECT_EQ(test, tmp, 20L);
		KUNIT_EXPECT_EQ(test, test_var, 30L);
		/* Failed CAS: tmp (20) != test_var (30); tmp is updated to 30. */
		KUNIT_EXPECT_FALSE(test, __atomic_compare_exchange_n(&test_var, &tmp, 40L,
								     1, __ATOMIC_RELAXED,
								     __ATOMIC_RELAXED));
		KUNIT_EXPECT_EQ(test, tmp, 30L);
		KUNIT_EXPECT_EQ(test, test_var, 30L);

		/* Each fetch-op returns the value before the operation. */
		KUNIT_EXPECT_EQ(test, 30L, __atomic_fetch_add(&test_var, 1, __ATOMIC_RELAXED));
		KUNIT_EXPECT_EQ(test, 31L, __atomic_fetch_sub(&test_var, 1, __ATOMIC_RELAXED));
		KUNIT_EXPECT_EQ(test, 30L, __atomic_fetch_and(&test_var, 0xf, __ATOMIC_RELAXED));
		KUNIT_EXPECT_EQ(test, 14L, __atomic_fetch_xor(&test_var, 0xf, __ATOMIC_RELAXED));
		KUNIT_EXPECT_EQ(test, 1L, __atomic_fetch_or(&test_var, 0xf0, __ATOMIC_RELAXED));
		KUNIT_EXPECT_EQ(test, 241L, __atomic_fetch_nand(&test_var, 0xf, __ATOMIC_RELAXED));
		KUNIT_EXPECT_EQ(test, -2L, test_var);

		/* Fence instrumentation must exist and link. */
		__atomic_thread_fence(__ATOMIC_SEQ_CST);
		__atomic_signal_fence(__ATOMIC_SEQ_CST);

		kcsan_disable_current();

		match_never = report_available();
	} while (!end_test_checks(match_never));
	KUNIT_EXPECT_FALSE(test, match_never);
}
1271f9ea6319SMarco Elver
1272d8fd74d3SMarco Elver __no_kcsan
test_1bit_value_change(struct kunit * test)1273d8fd74d3SMarco Elver static void test_1bit_value_change(struct kunit *test)
1274d8fd74d3SMarco Elver {
12757310bd1fSMarco Elver struct expect_report expect = {
1276d8fd74d3SMarco Elver .access = {
1277d8fd74d3SMarco Elver { test_kernel_read, &test_var, sizeof(test_var), 0 },
1278d8fd74d3SMarco Elver { test_kernel_xor_1bit, &test_var, sizeof(test_var), __KCSAN_ACCESS_RW(KCSAN_ACCESS_WRITE) },
1279d8fd74d3SMarco Elver },
1280d8fd74d3SMarco Elver };
1281d8fd74d3SMarco Elver bool match = false;
1282d8fd74d3SMarco Elver
1283d8fd74d3SMarco Elver begin_test_checks(test_kernel_read, test_kernel_xor_1bit);
1284d8fd74d3SMarco Elver do {
1285d8fd74d3SMarco Elver match = IS_ENABLED(CONFIG_KCSAN_PERMISSIVE)
1286d8fd74d3SMarco Elver ? report_available()
1287d8fd74d3SMarco Elver : report_matches(&expect);
1288d8fd74d3SMarco Elver } while (!end_test_checks(match));
1289d8fd74d3SMarco Elver if (IS_ENABLED(CONFIG_KCSAN_PERMISSIVE))
1290d8fd74d3SMarco Elver KUNIT_EXPECT_FALSE(test, match);
1291d8fd74d3SMarco Elver else
1292d8fd74d3SMarco Elver KUNIT_EXPECT_TRUE(test, match);
1293d8fd74d3SMarco Elver }
1294d8fd74d3SMarco Elver
12958bc32b34SMarco Elver __no_kcsan
test_correct_barrier(struct kunit * test)12968bc32b34SMarco Elver static void test_correct_barrier(struct kunit *test)
12978bc32b34SMarco Elver {
12988bc32b34SMarco Elver struct expect_report expect = {
12998bc32b34SMarco Elver .access = {
13008bc32b34SMarco Elver { test_kernel_with_memorder, &test_var, sizeof(test_var), __KCSAN_ACCESS_RW(KCSAN_ACCESS_WRITE) },
13018bc32b34SMarco Elver { test_kernel_with_memorder, &test_var, sizeof(test_var), __KCSAN_ACCESS_RW(0) },
13028bc32b34SMarco Elver },
13038bc32b34SMarco Elver };
13048bc32b34SMarco Elver bool match_expect = false;
13058bc32b34SMarco Elver
13068bc32b34SMarco Elver test_struct.val[0] = 0; /* init unlocked */
13078bc32b34SMarco Elver begin_test_checks(test_kernel_with_memorder, test_kernel_with_memorder);
13088bc32b34SMarco Elver do {
13098bc32b34SMarco Elver match_expect = report_matches_any_reordered(&expect);
13108bc32b34SMarco Elver } while (!end_test_checks(match_expect));
13118bc32b34SMarco Elver KUNIT_EXPECT_FALSE(test, match_expect);
13128bc32b34SMarco Elver }
13138bc32b34SMarco Elver
13148bc32b34SMarco Elver __no_kcsan
test_missing_barrier(struct kunit * test)13158bc32b34SMarco Elver static void test_missing_barrier(struct kunit *test)
13168bc32b34SMarco Elver {
13178bc32b34SMarco Elver struct expect_report expect = {
13188bc32b34SMarco Elver .access = {
13198bc32b34SMarco Elver { test_kernel_wrong_memorder, &test_var, sizeof(test_var), __KCSAN_ACCESS_RW(KCSAN_ACCESS_WRITE) },
13208bc32b34SMarco Elver { test_kernel_wrong_memorder, &test_var, sizeof(test_var), __KCSAN_ACCESS_RW(0) },
13218bc32b34SMarco Elver },
13228bc32b34SMarco Elver };
13238bc32b34SMarco Elver bool match_expect = false;
13248bc32b34SMarco Elver
13258bc32b34SMarco Elver test_struct.val[0] = 0; /* init unlocked */
13268bc32b34SMarco Elver begin_test_checks(test_kernel_wrong_memorder, test_kernel_wrong_memorder);
13278bc32b34SMarco Elver do {
13288bc32b34SMarco Elver match_expect = report_matches_any_reordered(&expect);
13298bc32b34SMarco Elver } while (!end_test_checks(match_expect));
13308bc32b34SMarco Elver if (IS_ENABLED(CONFIG_KCSAN_WEAK_MEMORY))
13318bc32b34SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect);
13328bc32b34SMarco Elver else
13338bc32b34SMarco Elver KUNIT_EXPECT_FALSE(test, match_expect);
13348bc32b34SMarco Elver }
13358bc32b34SMarco Elver
13368bc32b34SMarco Elver __no_kcsan
test_atomic_builtins_correct_barrier(struct kunit * test)13378bc32b34SMarco Elver static void test_atomic_builtins_correct_barrier(struct kunit *test)
13388bc32b34SMarco Elver {
13398bc32b34SMarco Elver struct expect_report expect = {
13408bc32b34SMarco Elver .access = {
13418bc32b34SMarco Elver { test_kernel_atomic_builtin_with_memorder, &test_var, sizeof(test_var), __KCSAN_ACCESS_RW(KCSAN_ACCESS_WRITE) },
13428bc32b34SMarco Elver { test_kernel_atomic_builtin_with_memorder, &test_var, sizeof(test_var), __KCSAN_ACCESS_RW(0) },
13438bc32b34SMarco Elver },
13448bc32b34SMarco Elver };
13458bc32b34SMarco Elver bool match_expect = false;
13468bc32b34SMarco Elver
13478bc32b34SMarco Elver test_struct.val[0] = 0; /* init unlocked */
13488bc32b34SMarco Elver begin_test_checks(test_kernel_atomic_builtin_with_memorder,
13498bc32b34SMarco Elver test_kernel_atomic_builtin_with_memorder);
13508bc32b34SMarco Elver do {
13518bc32b34SMarco Elver match_expect = report_matches_any_reordered(&expect);
13528bc32b34SMarco Elver } while (!end_test_checks(match_expect));
13538bc32b34SMarco Elver KUNIT_EXPECT_FALSE(test, match_expect);
13548bc32b34SMarco Elver }
13558bc32b34SMarco Elver
13568bc32b34SMarco Elver __no_kcsan
test_atomic_builtins_missing_barrier(struct kunit * test)13578bc32b34SMarco Elver static void test_atomic_builtins_missing_barrier(struct kunit *test)
13588bc32b34SMarco Elver {
13598bc32b34SMarco Elver struct expect_report expect = {
13608bc32b34SMarco Elver .access = {
13618bc32b34SMarco Elver { test_kernel_atomic_builtin_wrong_memorder, &test_var, sizeof(test_var), __KCSAN_ACCESS_RW(KCSAN_ACCESS_WRITE) },
13628bc32b34SMarco Elver { test_kernel_atomic_builtin_wrong_memorder, &test_var, sizeof(test_var), __KCSAN_ACCESS_RW(0) },
13638bc32b34SMarco Elver },
13648bc32b34SMarco Elver };
13658bc32b34SMarco Elver bool match_expect = false;
13668bc32b34SMarco Elver
13678bc32b34SMarco Elver test_struct.val[0] = 0; /* init unlocked */
13688bc32b34SMarco Elver begin_test_checks(test_kernel_atomic_builtin_wrong_memorder,
13698bc32b34SMarco Elver test_kernel_atomic_builtin_wrong_memorder);
13708bc32b34SMarco Elver do {
13718bc32b34SMarco Elver match_expect = report_matches_any_reordered(&expect);
13728bc32b34SMarco Elver } while (!end_test_checks(match_expect));
13738bc32b34SMarco Elver if (IS_ENABLED(CONFIG_KCSAN_WEAK_MEMORY))
13748bc32b34SMarco Elver KUNIT_EXPECT_TRUE(test, match_expect);
13758bc32b34SMarco Elver else
13768bc32b34SMarco Elver KUNIT_EXPECT_FALSE(test, match_expect);
13778bc32b34SMarco Elver }
13788bc32b34SMarco Elver
/*
 * Generate thread counts for all test cases. Values generated are in interval
 * [2, 5] followed by exponentially increasing thread counts from 8 to 32.
 *
 * The thread counts are chosen to cover potentially interesting boundaries and
 * corner cases (2 to 5), and then stress the system with larger counts.
 */
static const void *nthreads_gen_params(const void *prev, char *desc)
{
	/* The thread count is carried through the opaque KUnit param pointer. */
	long nthreads = (long)prev;

	if (nthreads < 0 || nthreads >= 32)
		nthreads = 0; /* stop */
	else if (!nthreads)
		nthreads = 2; /* initial value */
	else if (nthreads < 5)
		nthreads++;
	else if (nthreads == 5)
		nthreads = 8;
	else
		nthreads *= 2;

	if (!preempt_model_preemptible() ||
	    !IS_ENABLED(CONFIG_KCSAN_INTERRUPT_WATCHER)) {
		/*
		 * Without any preemption, keep 2 CPUs free for other tasks, one
		 * of which is the main test case function checking for
		 * completion or failure.
		 */
		const long min_unused_cpus = preempt_model_none() ? 2 : 0;
		const long min_required_cpus = 2 + min_unused_cpus;

		if (num_online_cpus() < min_required_cpus) {
			pr_err_once("Too few online CPUs (%u < %ld) for test\n",
				    num_online_cpus(), min_required_cpus);
			nthreads = 0;
		} else if (nthreads >= num_online_cpus() - min_unused_cpus) {
			/* Use negative value to indicate last param. */
			nthreads = -(num_online_cpus() - min_unused_cpus);
			pr_warn_once("Limiting number of threads to %ld (only %d online CPUs)\n",
				     -nthreads, num_online_cpus());
		}
	}

	/* test_init() recovers the count via abs(); sign only encodes "last". */
	snprintf(desc, KUNIT_PARAM_DESC_SIZE, "threads=%ld", abs(nthreads));
	return (void *)nthreads;
}
1426f6a14914SMarco Elver
/* All cases below are parameterized by thread count, except the "nothreads" one. */
#define KCSAN_KUNIT_CASE(test_name) KUNIT_CASE_PARAM(test_name, nthreads_gen_params)
static struct kunit_case kcsan_test_cases[] = {
	KUNIT_CASE(test_barrier_nothreads),
	KCSAN_KUNIT_CASE(test_basic),
	KCSAN_KUNIT_CASE(test_concurrent_races),
	KCSAN_KUNIT_CASE(test_novalue_change),
	KCSAN_KUNIT_CASE(test_novalue_change_exception),
	KCSAN_KUNIT_CASE(test_unknown_origin),
	KCSAN_KUNIT_CASE(test_write_write_assume_atomic),
	KCSAN_KUNIT_CASE(test_write_write_struct),
	KCSAN_KUNIT_CASE(test_write_write_struct_part),
	KCSAN_KUNIT_CASE(test_read_atomic_write_atomic),
	KCSAN_KUNIT_CASE(test_read_plain_atomic_write),
	KCSAN_KUNIT_CASE(test_read_plain_atomic_rmw),
	KCSAN_KUNIT_CASE(test_zero_size_access),
	KCSAN_KUNIT_CASE(test_data_race),
	KCSAN_KUNIT_CASE(test_data_racy_qualifier),
	KCSAN_KUNIT_CASE(test_assert_exclusive_writer),
	KCSAN_KUNIT_CASE(test_assert_exclusive_access),
	KCSAN_KUNIT_CASE(test_assert_exclusive_access_writer),
	KCSAN_KUNIT_CASE(test_assert_exclusive_bits_change),
	KCSAN_KUNIT_CASE(test_assert_exclusive_bits_nochange),
	KCSAN_KUNIT_CASE(test_assert_exclusive_writer_scoped),
	KCSAN_KUNIT_CASE(test_assert_exclusive_access_scoped),
	KCSAN_KUNIT_CASE(test_jiffies_noreport),
	KCSAN_KUNIT_CASE(test_seqlock_noreport),
	KCSAN_KUNIT_CASE(test_atomic_builtins),
	KCSAN_KUNIT_CASE(test_1bit_value_change),
	KCSAN_KUNIT_CASE(test_correct_barrier),
	KCSAN_KUNIT_CASE(test_missing_barrier),
	KCSAN_KUNIT_CASE(test_atomic_builtins_correct_barrier),
	KCSAN_KUNIT_CASE(test_atomic_builtins_missing_barrier),
	{},
};
14611fe84fd4SMarco Elver
14621fe84fd4SMarco Elver /* ===== End test cases ===== */
14631fe84fd4SMarco Elver
14641fe84fd4SMarco Elver /* Concurrent accesses from interrupts. */
14651fe84fd4SMarco Elver __no_kcsan
access_thread_timer(struct timer_list * timer)14661fe84fd4SMarco Elver static void access_thread_timer(struct timer_list *timer)
14671fe84fd4SMarco Elver {
14681fe84fd4SMarco Elver static atomic_t cnt = ATOMIC_INIT(0);
14691fe84fd4SMarco Elver unsigned int idx;
14701fe84fd4SMarco Elver void (*func)(void);
14711fe84fd4SMarco Elver
14721fe84fd4SMarco Elver idx = (unsigned int)atomic_inc_return(&cnt) % ARRAY_SIZE(access_kernels);
14731fe84fd4SMarco Elver /* Acquire potential initialization. */
14741fe84fd4SMarco Elver func = smp_load_acquire(&access_kernels[idx]);
14751fe84fd4SMarco Elver if (func)
14761fe84fd4SMarco Elver func();
14771fe84fd4SMarco Elver }
14781fe84fd4SMarco Elver
/* The main loop for each thread. */
__no_kcsan
static int access_thread(void *arg)
{
	struct timer_list timer;
	unsigned int cnt = 0;
	unsigned int idx;
	void (*func)(void);

	/* Per-thread timer provides the interrupt-context accesses. */
	timer_setup_on_stack(&timer, access_thread_timer, 0);
	do {
		might_sleep();

		if (!timer_pending(&timer))
			mod_timer(&timer, jiffies + 1);
		else {
			/* Iterate through all kernels. */
			idx = cnt++ % ARRAY_SIZE(access_kernels);
			/* Acquire potential initialization. */
			func = smp_load_acquire(&access_kernels[idx]);
			if (func)
				func();
		}
	} while (!torture_must_stop());
	/* Must fully quiesce the on-stack timer before returning. */
	timer_delete_sync(&timer);
	timer_destroy_on_stack(&timer);

	torture_kthread_stopping("access_thread");
	return 0;
}
15091fe84fd4SMarco Elver
/*
 * Per-test-case setup: reset the captured console output and spawn the
 * parameterized number of access threads via the torture framework.
 */
__no_kcsan
static int test_init(struct kunit *test)
{
	unsigned long flags;
	int nthreads;
	int i;

	/* Clear the console-capture buffer for this test case. */
	spin_lock_irqsave(&observed.lock, flags);
	for (i = 0; i < ARRAY_SIZE(observed.lines); ++i)
		observed.lines[i][0] = '\0';
	observed.nlines = 0;
	spin_unlock_irqrestore(&observed.lock, flags);

	/* "nothreads" tests run without the torture-thread machinery. */
	if (strstr(test->name, "nothreads"))
		return 0;

	if (!torture_init_begin((char *)test->name, 1))
		return -EBUSY;

	/* The previous test case must have cleaned up after itself. */
	if (WARN_ON(threads))
		goto err;

	for (i = 0; i < ARRAY_SIZE(access_kernels); ++i) {
		if (WARN_ON(access_kernels[i]))
			goto err;
	}

	/* Thread count was encoded (possibly negated) by nthreads_gen_params(). */
	nthreads = abs((long)test->param_value);
	if (WARN_ON(!nthreads))
		goto err;

	/* +1 for the NULL terminator that test_exit() iterates up to. */
	threads = kcalloc(nthreads + 1, sizeof(struct task_struct *), GFP_KERNEL);
	if (WARN_ON(!threads))
		goto err;

	threads[nthreads] = NULL;
	for (i = 0; i < nthreads; ++i) {
		if (torture_create_kthread(access_thread, NULL, threads[i]))
			goto err;
	}

	torture_init_end();

	return 0;

err:
	kfree(threads);
	threads = NULL;
	torture_init_end();
	return -EINVAL;
}
15611fe84fd4SMarco Elver
15621fe84fd4SMarco Elver __no_kcsan
test_exit(struct kunit * test)15631fe84fd4SMarco Elver static void test_exit(struct kunit *test)
15641fe84fd4SMarco Elver {
15651fe84fd4SMarco Elver struct task_struct **stop_thread;
15661fe84fd4SMarco Elver int i;
15671fe84fd4SMarco Elver
15688bc32b34SMarco Elver if (strstr(test->name, "nothreads"))
15698bc32b34SMarco Elver return;
15708bc32b34SMarco Elver
15711fe84fd4SMarco Elver if (torture_cleanup_begin())
15721fe84fd4SMarco Elver return;
15731fe84fd4SMarco Elver
15741fe84fd4SMarco Elver for (i = 0; i < ARRAY_SIZE(access_kernels); ++i)
15751fe84fd4SMarco Elver WRITE_ONCE(access_kernels[i], NULL);
15761fe84fd4SMarco Elver
15771fe84fd4SMarco Elver if (threads) {
15781fe84fd4SMarco Elver for (stop_thread = threads; *stop_thread; stop_thread++)
15791fe84fd4SMarco Elver torture_stop_kthread(reader_thread, *stop_thread);
15801fe84fd4SMarco Elver
15811fe84fd4SMarco Elver kfree(threads);
15821fe84fd4SMarco Elver threads = NULL;
15831fe84fd4SMarco Elver }
15841fe84fd4SMarco Elver
15851fe84fd4SMarco Elver torture_cleanup_end();
15861fe84fd4SMarco Elver }
15871fe84fd4SMarco Elver
/* Attach the console probe so reports can be captured from the kernel log. */
__no_kcsan
static void register_tracepoints(void)
{
	register_trace_console(probe_console, NULL);
}
15931fe84fd4SMarco Elver
/* Detach the console probe registered by register_tracepoints(). */
__no_kcsan
static void unregister_tracepoints(void)
{
	unregister_trace_console(probe_console, NULL);
}
15991fe84fd4SMarco Elver
/* Suite-wide setup: start capturing console output before any test runs. */
static int kcsan_suite_init(struct kunit_suite *suite)
{
	register_tracepoints();
	return 0;
}
16051fe84fd4SMarco Elver
/* Suite-wide teardown: stop capturing and wait for in-flight probes. */
static void kcsan_suite_exit(struct kunit_suite *suite)
{
	unregister_tracepoints();
	tracepoint_synchronize_unregister();
}
16111fe84fd4SMarco Elver
static struct kunit_suite kcsan_test_suite = {
	.name = "kcsan",
	.test_cases = kcsan_test_cases,
	.init = test_init,		/* per-test-case: spawn access threads */
	.exit = test_exit,		/* per-test-case: stop access threads */
	.suite_init = kcsan_suite_init,	/* register console tracepoint */
	.suite_exit = kcsan_suite_exit,	/* unregister console tracepoint */
};

kunit_test_suites(&kcsan_test_suite);
16221fe84fd4SMarco Elver
1623ddd7432dSJeff Johnson MODULE_DESCRIPTION("KCSAN test suite");
16241fe84fd4SMarco Elver MODULE_LICENSE("GPL v2");
16251fe84fd4SMarco Elver MODULE_AUTHOR("Marco Elver <elver@google.com>");
1626