// SPDX-License-Identifier: GPL-2.0
/*
 * KCSAN short boot-time selftests.
 *
 * Copyright (C) 2019, Google LLC.
 */

#define pr_fmt(fmt) "kcsan: " fmt

#include <linux/atomic.h>
#include <linux/bitops.h>
#include <linux/init.h>
#include <linux/kcsan-checks.h>
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/random.h>
#include <linux/sched.h>
#include <linux/spinlock.h>
#include <linux/types.h>

#include "encoding.h"

#define ITERS_PER_TEST 2000

/* Test requirements. */
static bool __init test_requires(void)
{
	/* random should be initialized for the below tests */
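	/*
	 * (Heuristic, not a guarantee: an unseeded RNG typically yields
	 * zeros, whereas two genuinely random u32s sum to zero only with
	 * probability ~2^-32.)
	 */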
	return get_random_u32() + get_random_u32() != 0;
}

/*
 * Test watchpoint encode and decode: check that encoding some access's info,
 * and then subsequent decode preserves the access's info.
 */
static bool __init test_encode_decode(void)
{
	int i;

	for (i = 0; i < ITERS_PER_TEST; ++i) {
		size_t size = prandom_u32_max(MAX_ENCODABLE_SIZE) + 1;
		bool is_write = !!prandom_u32_max(2);
		unsigned long verif_masked_addr;
		long encoded_watchpoint;
		bool verif_is_write;
		unsigned long addr;
		size_t verif_size;

		get_random_bytes(&addr, sizeof(addr));
		if (addr < PAGE_SIZE)
			addr = PAGE_SIZE;

		if (WARN_ON(!check_encodable(addr, size)))
			return false;

		encoded_watchpoint = encode_watchpoint(addr, size, is_write);
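
		/*
		 * The encoded watchpoint packs the masked address, access
		 * size, and a read/write bit into a single word (see
		 * encoding.h); decoding must recover all three.
		 */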

		/* Check special watchpoints */
		if (WARN_ON(decode_watchpoint(INVALID_WATCHPOINT, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;
		if (WARN_ON(decode_watchpoint(CONSUMED_WATCHPOINT, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;

		/* Check decoding watchpoint returns same data */
		if (WARN_ON(!decode_watchpoint(encoded_watchpoint, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;
		if (WARN_ON(verif_masked_addr != (addr & WATCHPOINT_ADDR_MASK)))
			goto fail;
		if (WARN_ON(verif_size != size))
			goto fail;
		if (WARN_ON(is_write != verif_is_write))
			goto fail;

		continue;
fail:
		pr_err("%s fail: %s %zu bytes @ %lx -> encoded: %lx -> %s %zu bytes @ %lx\n",
		       __func__, is_write ? "write" : "read", size, addr, encoded_watchpoint,
		       verif_is_write ? "write" : "read", verif_size, verif_masked_addr);
		return false;
	}

	return true;
}

/* Test access matching function. */
static bool __init test_matching_access(void)
{
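	/*
	 * matching_access(addr1, size1, addr2, size2) reports whether the two
	 * byte ranges overlap (assuming the end-inclusive range comparison
	 * used by kcsan/core.c).
	 */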
	if (WARN_ON(!matching_access(10, 1, 10, 1)))
		return false;
	if (WARN_ON(!matching_access(10, 2, 11, 1)))
		return false;
	if (WARN_ON(!matching_access(10, 1, 9, 2)))
		return false;
	if (WARN_ON(matching_access(10, 1, 11, 1)))
		return false;
	if (WARN_ON(matching_access(9, 1, 10, 1)))
		return false;

	/*
	 * An access of size 0 could match another access, as demonstrated here.
	 * Rather than add more comparisons to 'matching_access()', which would
	 * end up in the fast-path for *all* checks, check_access() simply
	 * returns for all accesses of size 0.
	 */
	if (WARN_ON(!matching_access(8, 8, 12, 0)))
		return false;
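
	/*
	 * (Worked example, assuming the end-inclusive comparison: range one
	 * covers [8, 15]; the size-0 access computes end = 12 + 0 - 1 = 11,
	 * so both 8 <= 11 and 12 <= 15 hold and the accesses "match".)
	 */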

	return true;
}

/*
 * Correct memory barrier instrumentation is critical to avoiding false
 * positives: simple test to check at boot certain barriers are always properly
 * instrumented. See kcsan_test for a more complete test.
 */
static DEFINE_SPINLOCK(test_spinlock);
static bool __init test_barrier(void)
{
#ifdef CONFIG_KCSAN_WEAK_MEMORY
	struct kcsan_scoped_access *reorder_access = &current->kcsan_ctx.reorder_access;
#else
	struct kcsan_scoped_access *reorder_access = NULL;
#endif
	bool ret = true;
	arch_spinlock_t arch_spinlock = __ARCH_SPIN_LOCK_UNLOCKED;
	atomic_t dummy;
	long test_var;

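	/*
	 * Without CONFIG_KCSAN_WEAK_MEMORY there is no reorder_access to
	 * observe, and barrier instrumentation only matters when other CPUs
	 * can race, so skip the test in those configurations.
	 */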
	if (!reorder_access || !IS_ENABLED(CONFIG_SMP))
		return true;

#define __KCSAN_CHECK_BARRIER(access_type, barrier, name)					\
	do {											\
		reorder_access->type = (access_type) | KCSAN_ACCESS_SCOPED;			\
		reorder_access->size = 1;							\
		barrier;									\
		if (reorder_access->size != 0) {						\
			pr_err("improperly instrumented type=(" #access_type "): " name "\n");	\
			ret = false;								\
		}										\
	} while (0)
#define KCSAN_CHECK_READ_BARRIER(b)  __KCSAN_CHECK_BARRIER(0, b, #b)
#define KCSAN_CHECK_WRITE_BARRIER(b) __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE, b, #b)
#define KCSAN_CHECK_RW_BARRIER(b)    __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE | KCSAN_ACCESS_COMPOUND, b, #b)

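	/*
	 * How the check works: each macro plants a dummy 1-byte scoped access
	 * in reorder_access, executes the barrier, and then verifies that the
	 * barrier's instrumentation flushed the access (size reset to 0). A
	 * size that is still non-zero means the barrier was not instrumented.
	 */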
	kcsan_nestable_atomic_begin(); /* No watchpoints in called functions. */

	KCSAN_CHECK_READ_BARRIER(mb());
	KCSAN_CHECK_READ_BARRIER(rmb());
	KCSAN_CHECK_READ_BARRIER(smp_mb());
	KCSAN_CHECK_READ_BARRIER(smp_rmb());
	KCSAN_CHECK_READ_BARRIER(dma_rmb());
	KCSAN_CHECK_READ_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_READ_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_READ_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_READ_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_READ_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(cmpxchg(&test_var, 0, 0));
	KCSAN_CHECK_READ_BARRIER(cmpxchg_release(&test_var, 0, 0));
	KCSAN_CHECK_READ_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_READ_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_READ_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_READ_BARRIER(spin_unlock(&test_spinlock));

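	/*
	 * (Note: for the lock/unlock pairs above and below, the unlock is the
	 * operation under test, since its release ordering is what must be
	 * instrumented; the preceding lock only keeps the lock state
	 * consistent.)
	 */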
	KCSAN_CHECK_WRITE_BARRIER(mb());
	KCSAN_CHECK_WRITE_BARRIER(wmb());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb());
	KCSAN_CHECK_WRITE_BARRIER(smp_wmb());
	KCSAN_CHECK_WRITE_BARRIER(dma_wmb());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_WRITE_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(cmpxchg(&test_var, 0, 0));
	KCSAN_CHECK_WRITE_BARRIER(cmpxchg_release(&test_var, 0, 0));
	KCSAN_CHECK_WRITE_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_WRITE_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_WRITE_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_WRITE_BARRIER(spin_unlock(&test_spinlock));

	KCSAN_CHECK_RW_BARRIER(mb());
	KCSAN_CHECK_RW_BARRIER(wmb());
	KCSAN_CHECK_RW_BARRIER(rmb());
	KCSAN_CHECK_RW_BARRIER(smp_mb());
	KCSAN_CHECK_RW_BARRIER(smp_wmb());
	KCSAN_CHECK_RW_BARRIER(smp_rmb());
	KCSAN_CHECK_RW_BARRIER(dma_wmb());
	KCSAN_CHECK_RW_BARRIER(dma_rmb());
	KCSAN_CHECK_RW_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_RW_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_RW_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_RW_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_RW_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(cmpxchg(&test_var, 0, 0));
	KCSAN_CHECK_RW_BARRIER(cmpxchg_release(&test_var, 0, 0));
	KCSAN_CHECK_RW_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_RW_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_RW_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_RW_BARRIER(spin_unlock(&test_spinlock));

#ifdef clear_bit_unlock_is_negative_byte
	KCSAN_CHECK_RW_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(clear_bit_unlock_is_negative_byte(0, &test_var));
#endif
	kcsan_nestable_atomic_end();

	return ret;
}

static int __init kcsan_selftest(void)
{
	int passed = 0;
	int total = 0;

#define RUN_TEST(do_test)						\
	do {								\
		++total;						\
		if (do_test())						\
			++passed;					\
		else							\
			pr_err("selftest: " #do_test " failed");	\
	} while (0)

	RUN_TEST(test_requires);
	RUN_TEST(test_encode_decode);
	RUN_TEST(test_matching_access);
	RUN_TEST(test_barrier);

	pr_info("selftest: %d/%d tests passed\n", passed, total);
	if (passed != total)
		panic("selftests failed");

	return 0;
}
postcore_initcall(kcsan_selftest);