// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-instrumented.sh
// DO NOT MODIFY THIS FILE DIRECTLY
/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality, an arch's atomic.h file needs to define all
 * atomic operations with an arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file provides atomic_read() that forwards to
 * arch_atomic_read() for the actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. an atomic_read()/atomic_cmpxchg() loop), then it needs to
 * use the arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to
 * avoid double instrumentation.
 */
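/*
 * Illustrative sketch only (no particular arch is assumed): an arch header
 * built on this scheme might look like
 *
 *	static __always_inline int arch_atomic_read(const atomic_t *v)
 *	{
 *		return READ_ONCE(v->counter);
 *	}
 *	// ... remaining arch_atomic*() operations ...
 *	#include <asm-generic/atomic-instrumented.h>
 *
 * after which callers of atomic_read() get the instrumented wrapper defined
 * below, which forwards to arch_atomic_read().
 */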
#ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
#define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/compiler.h>
#include <linux/instrumented.h>
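/*
 * The instrument_atomic_read()/instrument_atomic_write()/
 * instrument_atomic_read_write() helpers come from <linux/instrumented.h>;
 * they report the access to the sanitizers (e.g. KASAN, KCSAN) before the
 * arch_ operation is carried out.
 */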
static __always_inline int
atomic_read(const atomic_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic_read(v);
}
#define atomic_read atomic_read

#if defined(arch_atomic_read_acquire)
static __always_inline int
atomic_read_acquire(const atomic_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic_read_acquire(v);
}
#define atomic_read_acquire atomic_read_acquire
#endif
static __always_inline void
atomic_set(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}
#define atomic_set atomic_set

#if defined(arch_atomic_set_release)
static __always_inline void
atomic_set_release(atomic_t *v, int i)
{
	instrument_atomic_write(v, sizeof(*v));
	arch_atomic_set_release(v, i);
}
#define atomic_set_release atomic_set_release
#endif
static __always_inline void
atomic_add(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}
#define atomic_add atomic_add

#if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}
#define atomic_add_return atomic_add_return
#endif
78 #if defined(arch_atomic_add_return_acquire)
79 static __always_inline int
80 atomic_add_return_acquire(int i, atomic_t *v)
82 instrument_atomic_read_write(v, sizeof(*v));
83 return arch_atomic_add_return_acquire(i, v);
85 #define atomic_add_return_acquire atomic_add_return_acquire
88 #if defined(arch_atomic_add_return_release)
89 static __always_inline int
90 atomic_add_return_release(int i, atomic_t *v)
92 instrument_atomic_read_write(v, sizeof(*v));
93 return arch_atomic_add_return_release(i, v);
95 #define atomic_add_return_release atomic_add_return_release
98 #if defined(arch_atomic_add_return_relaxed)
99 static __always_inline int
100 atomic_add_return_relaxed(int i, atomic_t *v)
102 instrument_atomic_read_write(v, sizeof(*v));
103 return arch_atomic_add_return_relaxed(i, v);
105 #define atomic_add_return_relaxed atomic_add_return_relaxed
108 #if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
109 static __always_inline int
110 atomic_fetch_add(int i, atomic_t *v)
112 instrument_atomic_read_write(v, sizeof(*v));
113 return arch_atomic_fetch_add(i, v);
115 #define atomic_fetch_add atomic_fetch_add
118 #if defined(arch_atomic_fetch_add_acquire)
119 static __always_inline int
120 atomic_fetch_add_acquire(int i, atomic_t *v)
122 instrument_atomic_read_write(v, sizeof(*v));
123 return arch_atomic_fetch_add_acquire(i, v);
125 #define atomic_fetch_add_acquire atomic_fetch_add_acquire
128 #if defined(arch_atomic_fetch_add_release)
129 static __always_inline int
130 atomic_fetch_add_release(int i, atomic_t *v)
132 instrument_atomic_read_write(v, sizeof(*v));
133 return arch_atomic_fetch_add_release(i, v);
135 #define atomic_fetch_add_release atomic_fetch_add_release
138 #if defined(arch_atomic_fetch_add_relaxed)
139 static __always_inline int
140 atomic_fetch_add_relaxed(int i, atomic_t *v)
142 instrument_atomic_read_write(v, sizeof(*v));
143 return arch_atomic_fetch_add_relaxed(i, v);
145 #define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
148 static __always_inline void
149 atomic_sub(int i, atomic_t *v)
151 instrument_atomic_read_write(v, sizeof(*v));
152 arch_atomic_sub(i, v);
154 #define atomic_sub atomic_sub
156 #if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
157 static __always_inline int
158 atomic_sub_return(int i, atomic_t *v)
160 instrument_atomic_read_write(v, sizeof(*v));
161 return arch_atomic_sub_return(i, v);
163 #define atomic_sub_return atomic_sub_return
166 #if defined(arch_atomic_sub_return_acquire)
167 static __always_inline int
168 atomic_sub_return_acquire(int i, atomic_t *v)
170 instrument_atomic_read_write(v, sizeof(*v));
171 return arch_atomic_sub_return_acquire(i, v);
173 #define atomic_sub_return_acquire atomic_sub_return_acquire
176 #if defined(arch_atomic_sub_return_release)
177 static __always_inline int
178 atomic_sub_return_release(int i, atomic_t *v)
180 instrument_atomic_read_write(v, sizeof(*v));
181 return arch_atomic_sub_return_release(i, v);
183 #define atomic_sub_return_release atomic_sub_return_release
186 #if defined(arch_atomic_sub_return_relaxed)
187 static __always_inline int
188 atomic_sub_return_relaxed(int i, atomic_t *v)
190 instrument_atomic_read_write(v, sizeof(*v));
191 return arch_atomic_sub_return_relaxed(i, v);
193 #define atomic_sub_return_relaxed atomic_sub_return_relaxed
196 #if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
197 static __always_inline int
198 atomic_fetch_sub(int i, atomic_t *v)
200 instrument_atomic_read_write(v, sizeof(*v));
201 return arch_atomic_fetch_sub(i, v);
203 #define atomic_fetch_sub atomic_fetch_sub
206 #if defined(arch_atomic_fetch_sub_acquire)
207 static __always_inline int
208 atomic_fetch_sub_acquire(int i, atomic_t *v)
210 instrument_atomic_read_write(v, sizeof(*v));
211 return arch_atomic_fetch_sub_acquire(i, v);
213 #define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
216 #if defined(arch_atomic_fetch_sub_release)
217 static __always_inline int
218 atomic_fetch_sub_release(int i, atomic_t *v)
220 instrument_atomic_read_write(v, sizeof(*v));
221 return arch_atomic_fetch_sub_release(i, v);
223 #define atomic_fetch_sub_release atomic_fetch_sub_release
226 #if defined(arch_atomic_fetch_sub_relaxed)
227 static __always_inline int
228 atomic_fetch_sub_relaxed(int i, atomic_t *v)
230 instrument_atomic_read_write(v, sizeof(*v));
231 return arch_atomic_fetch_sub_relaxed(i, v);
233 #define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
#if defined(arch_atomic_inc)
static __always_inline void
atomic_inc(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#define atomic_inc atomic_inc
#endif
246 #if defined(arch_atomic_inc_return)
247 static __always_inline int
248 atomic_inc_return(atomic_t *v)
250 instrument_atomic_read_write(v, sizeof(*v));
251 return arch_atomic_inc_return(v);
253 #define atomic_inc_return atomic_inc_return
256 #if defined(arch_atomic_inc_return_acquire)
257 static __always_inline int
258 atomic_inc_return_acquire(atomic_t *v)
260 instrument_atomic_read_write(v, sizeof(*v));
261 return arch_atomic_inc_return_acquire(v);
263 #define atomic_inc_return_acquire atomic_inc_return_acquire
266 #if defined(arch_atomic_inc_return_release)
267 static __always_inline int
268 atomic_inc_return_release(atomic_t *v)
270 instrument_atomic_read_write(v, sizeof(*v));
271 return arch_atomic_inc_return_release(v);
273 #define atomic_inc_return_release atomic_inc_return_release
276 #if defined(arch_atomic_inc_return_relaxed)
277 static __always_inline int
278 atomic_inc_return_relaxed(atomic_t *v)
280 instrument_atomic_read_write(v, sizeof(*v));
281 return arch_atomic_inc_return_relaxed(v);
283 #define atomic_inc_return_relaxed atomic_inc_return_relaxed
286 #if defined(arch_atomic_fetch_inc)
287 static __always_inline int
288 atomic_fetch_inc(atomic_t *v)
290 instrument_atomic_read_write(v, sizeof(*v));
291 return arch_atomic_fetch_inc(v);
293 #define atomic_fetch_inc atomic_fetch_inc
296 #if defined(arch_atomic_fetch_inc_acquire)
297 static __always_inline int
298 atomic_fetch_inc_acquire(atomic_t *v)
300 instrument_atomic_read_write(v, sizeof(*v));
301 return arch_atomic_fetch_inc_acquire(v);
303 #define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
306 #if defined(arch_atomic_fetch_inc_release)
307 static __always_inline int
308 atomic_fetch_inc_release(atomic_t *v)
310 instrument_atomic_read_write(v, sizeof(*v));
311 return arch_atomic_fetch_inc_release(v);
313 #define atomic_fetch_inc_release atomic_fetch_inc_release
316 #if defined(arch_atomic_fetch_inc_relaxed)
317 static __always_inline int
318 atomic_fetch_inc_relaxed(atomic_t *v)
320 instrument_atomic_read_write(v, sizeof(*v));
321 return arch_atomic_fetch_inc_relaxed(v);
323 #define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#if defined(arch_atomic_dec)
static __always_inline void
atomic_dec(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#define atomic_dec atomic_dec
#endif
336 #if defined(arch_atomic_dec_return)
337 static __always_inline int
338 atomic_dec_return(atomic_t *v)
340 instrument_atomic_read_write(v, sizeof(*v));
341 return arch_atomic_dec_return(v);
343 #define atomic_dec_return atomic_dec_return
346 #if defined(arch_atomic_dec_return_acquire)
347 static __always_inline int
348 atomic_dec_return_acquire(atomic_t *v)
350 instrument_atomic_read_write(v, sizeof(*v));
351 return arch_atomic_dec_return_acquire(v);
353 #define atomic_dec_return_acquire atomic_dec_return_acquire
356 #if defined(arch_atomic_dec_return_release)
357 static __always_inline int
358 atomic_dec_return_release(atomic_t *v)
360 instrument_atomic_read_write(v, sizeof(*v));
361 return arch_atomic_dec_return_release(v);
363 #define atomic_dec_return_release atomic_dec_return_release
366 #if defined(arch_atomic_dec_return_relaxed)
367 static __always_inline int
368 atomic_dec_return_relaxed(atomic_t *v)
370 instrument_atomic_read_write(v, sizeof(*v));
371 return arch_atomic_dec_return_relaxed(v);
373 #define atomic_dec_return_relaxed atomic_dec_return_relaxed
376 #if defined(arch_atomic_fetch_dec)
377 static __always_inline int
378 atomic_fetch_dec(atomic_t *v)
380 instrument_atomic_read_write(v, sizeof(*v));
381 return arch_atomic_fetch_dec(v);
383 #define atomic_fetch_dec atomic_fetch_dec
386 #if defined(arch_atomic_fetch_dec_acquire)
387 static __always_inline int
388 atomic_fetch_dec_acquire(atomic_t *v)
390 instrument_atomic_read_write(v, sizeof(*v));
391 return arch_atomic_fetch_dec_acquire(v);
393 #define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
396 #if defined(arch_atomic_fetch_dec_release)
397 static __always_inline int
398 atomic_fetch_dec_release(atomic_t *v)
400 instrument_atomic_read_write(v, sizeof(*v));
401 return arch_atomic_fetch_dec_release(v);
403 #define atomic_fetch_dec_release atomic_fetch_dec_release
406 #if defined(arch_atomic_fetch_dec_relaxed)
407 static __always_inline int
408 atomic_fetch_dec_relaxed(atomic_t *v)
410 instrument_atomic_read_write(v, sizeof(*v));
411 return arch_atomic_fetch_dec_relaxed(v);
413 #define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
416 static __always_inline void
417 atomic_and(int i, atomic_t *v)
419 instrument_atomic_read_write(v, sizeof(*v));
420 arch_atomic_and(i, v);
422 #define atomic_and atomic_and
424 #if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
425 static __always_inline int
426 atomic_fetch_and(int i, atomic_t *v)
428 instrument_atomic_read_write(v, sizeof(*v));
429 return arch_atomic_fetch_and(i, v);
431 #define atomic_fetch_and atomic_fetch_and
434 #if defined(arch_atomic_fetch_and_acquire)
435 static __always_inline int
436 atomic_fetch_and_acquire(int i, atomic_t *v)
438 instrument_atomic_read_write(v, sizeof(*v));
439 return arch_atomic_fetch_and_acquire(i, v);
441 #define atomic_fetch_and_acquire atomic_fetch_and_acquire
444 #if defined(arch_atomic_fetch_and_release)
445 static __always_inline int
446 atomic_fetch_and_release(int i, atomic_t *v)
448 instrument_atomic_read_write(v, sizeof(*v));
449 return arch_atomic_fetch_and_release(i, v);
451 #define atomic_fetch_and_release atomic_fetch_and_release
454 #if defined(arch_atomic_fetch_and_relaxed)
455 static __always_inline int
456 atomic_fetch_and_relaxed(int i, atomic_t *v)
458 instrument_atomic_read_write(v, sizeof(*v));
459 return arch_atomic_fetch_and_relaxed(i, v);
461 #define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
464 #if defined(arch_atomic_andnot)
465 static __always_inline void
466 atomic_andnot(int i, atomic_t *v)
468 instrument_atomic_read_write(v, sizeof(*v));
469 arch_atomic_andnot(i, v);
471 #define atomic_andnot atomic_andnot
474 #if defined(arch_atomic_fetch_andnot)
475 static __always_inline int
476 atomic_fetch_andnot(int i, atomic_t *v)
478 instrument_atomic_read_write(v, sizeof(*v));
479 return arch_atomic_fetch_andnot(i, v);
481 #define atomic_fetch_andnot atomic_fetch_andnot
484 #if defined(arch_atomic_fetch_andnot_acquire)
485 static __always_inline int
486 atomic_fetch_andnot_acquire(int i, atomic_t *v)
488 instrument_atomic_read_write(v, sizeof(*v));
489 return arch_atomic_fetch_andnot_acquire(i, v);
491 #define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
494 #if defined(arch_atomic_fetch_andnot_release)
495 static __always_inline int
496 atomic_fetch_andnot_release(int i, atomic_t *v)
498 instrument_atomic_read_write(v, sizeof(*v));
499 return arch_atomic_fetch_andnot_release(i, v);
501 #define atomic_fetch_andnot_release atomic_fetch_andnot_release
504 #if defined(arch_atomic_fetch_andnot_relaxed)
505 static __always_inline int
506 atomic_fetch_andnot_relaxed(int i, atomic_t *v)
508 instrument_atomic_read_write(v, sizeof(*v));
509 return arch_atomic_fetch_andnot_relaxed(i, v);
511 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
514 static __always_inline void
515 atomic_or(int i, atomic_t *v)
517 instrument_atomic_read_write(v, sizeof(*v));
518 arch_atomic_or(i, v);
520 #define atomic_or atomic_or
522 #if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
523 static __always_inline int
524 atomic_fetch_or(int i, atomic_t *v)
526 instrument_atomic_read_write(v, sizeof(*v));
527 return arch_atomic_fetch_or(i, v);
529 #define atomic_fetch_or atomic_fetch_or
532 #if defined(arch_atomic_fetch_or_acquire)
533 static __always_inline int
534 atomic_fetch_or_acquire(int i, atomic_t *v)
536 instrument_atomic_read_write(v, sizeof(*v));
537 return arch_atomic_fetch_or_acquire(i, v);
539 #define atomic_fetch_or_acquire atomic_fetch_or_acquire
542 #if defined(arch_atomic_fetch_or_release)
543 static __always_inline int
544 atomic_fetch_or_release(int i, atomic_t *v)
546 instrument_atomic_read_write(v, sizeof(*v));
547 return arch_atomic_fetch_or_release(i, v);
549 #define atomic_fetch_or_release atomic_fetch_or_release
552 #if defined(arch_atomic_fetch_or_relaxed)
553 static __always_inline int
554 atomic_fetch_or_relaxed(int i, atomic_t *v)
556 instrument_atomic_read_write(v, sizeof(*v));
557 return arch_atomic_fetch_or_relaxed(i, v);
559 #define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
562 static __always_inline void
563 atomic_xor(int i, atomic_t *v)
565 instrument_atomic_read_write(v, sizeof(*v));
566 arch_atomic_xor(i, v);
568 #define atomic_xor atomic_xor
570 #if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
571 static __always_inline int
572 atomic_fetch_xor(int i, atomic_t *v)
574 instrument_atomic_read_write(v, sizeof(*v));
575 return arch_atomic_fetch_xor(i, v);
577 #define atomic_fetch_xor atomic_fetch_xor
580 #if defined(arch_atomic_fetch_xor_acquire)
581 static __always_inline int
582 atomic_fetch_xor_acquire(int i, atomic_t *v)
584 instrument_atomic_read_write(v, sizeof(*v));
585 return arch_atomic_fetch_xor_acquire(i, v);
587 #define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
590 #if defined(arch_atomic_fetch_xor_release)
591 static __always_inline int
592 atomic_fetch_xor_release(int i, atomic_t *v)
594 instrument_atomic_read_write(v, sizeof(*v));
595 return arch_atomic_fetch_xor_release(i, v);
597 #define atomic_fetch_xor_release atomic_fetch_xor_release
600 #if defined(arch_atomic_fetch_xor_relaxed)
601 static __always_inline int
602 atomic_fetch_xor_relaxed(int i, atomic_t *v)
604 instrument_atomic_read_write(v, sizeof(*v));
605 return arch_atomic_fetch_xor_relaxed(i, v);
607 #define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
610 #if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
611 static __always_inline int
612 atomic_xchg(atomic_t *v, int i)
614 instrument_atomic_read_write(v, sizeof(*v));
615 return arch_atomic_xchg(v, i);
617 #define atomic_xchg atomic_xchg
620 #if defined(arch_atomic_xchg_acquire)
621 static __always_inline int
622 atomic_xchg_acquire(atomic_t *v, int i)
624 instrument_atomic_read_write(v, sizeof(*v));
625 return arch_atomic_xchg_acquire(v, i);
627 #define atomic_xchg_acquire atomic_xchg_acquire
630 #if defined(arch_atomic_xchg_release)
631 static __always_inline int
632 atomic_xchg_release(atomic_t *v, int i)
634 instrument_atomic_read_write(v, sizeof(*v));
635 return arch_atomic_xchg_release(v, i);
637 #define atomic_xchg_release atomic_xchg_release
640 #if defined(arch_atomic_xchg_relaxed)
641 static __always_inline int
642 atomic_xchg_relaxed(atomic_t *v, int i)
644 instrument_atomic_read_write(v, sizeof(*v));
645 return arch_atomic_xchg_relaxed(v, i);
647 #define atomic_xchg_relaxed atomic_xchg_relaxed
650 #if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
651 static __always_inline int
652 atomic_cmpxchg(atomic_t *v, int old, int new)
654 instrument_atomic_read_write(v, sizeof(*v));
655 return arch_atomic_cmpxchg(v, old, new);
657 #define atomic_cmpxchg atomic_cmpxchg
660 #if defined(arch_atomic_cmpxchg_acquire)
661 static __always_inline int
662 atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
664 instrument_atomic_read_write(v, sizeof(*v));
665 return arch_atomic_cmpxchg_acquire(v, old, new);
667 #define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
670 #if defined(arch_atomic_cmpxchg_release)
671 static __always_inline int
672 atomic_cmpxchg_release(atomic_t *v, int old, int new)
674 instrument_atomic_read_write(v, sizeof(*v));
675 return arch_atomic_cmpxchg_release(v, old, new);
677 #define atomic_cmpxchg_release atomic_cmpxchg_release
#if defined(arch_atomic_cmpxchg_relaxed)
static __always_inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
#endif
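/*
 * Note: the try_cmpxchg() wrappers below instrument both the atomic variable
 * and the caller-provided 'old' location, since the old value may be updated
 * when the exchange fails.
 */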
#if defined(arch_atomic_try_cmpxchg)
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	instrument_atomic_read_write(v, sizeof(*v));
	instrument_atomic_read_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif
701 #if defined(arch_atomic_try_cmpxchg_acquire)
702 static __always_inline bool
703 atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
705 instrument_atomic_read_write(v, sizeof(*v));
706 instrument_atomic_read_write(old, sizeof(*old));
707 return arch_atomic_try_cmpxchg_acquire(v, old, new);
709 #define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
712 #if defined(arch_atomic_try_cmpxchg_release)
713 static __always_inline bool
714 atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
716 instrument_atomic_read_write(v, sizeof(*v));
717 instrument_atomic_read_write(old, sizeof(*old));
718 return arch_atomic_try_cmpxchg_release(v, old, new);
720 #define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
723 #if defined(arch_atomic_try_cmpxchg_relaxed)
724 static __always_inline bool
725 atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
727 instrument_atomic_read_write(v, sizeof(*v));
728 instrument_atomic_read_write(old, sizeof(*old));
729 return arch_atomic_try_cmpxchg_relaxed(v, old, new);
731 #define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
734 #if defined(arch_atomic_sub_and_test)
735 static __always_inline bool
736 atomic_sub_and_test(int i, atomic_t *v)
738 instrument_atomic_read_write(v, sizeof(*v));
739 return arch_atomic_sub_and_test(i, v);
741 #define atomic_sub_and_test atomic_sub_and_test
744 #if defined(arch_atomic_dec_and_test)
745 static __always_inline bool
746 atomic_dec_and_test(atomic_t *v)
748 instrument_atomic_read_write(v, sizeof(*v));
749 return arch_atomic_dec_and_test(v);
751 #define atomic_dec_and_test atomic_dec_and_test
754 #if defined(arch_atomic_inc_and_test)
755 static __always_inline bool
756 atomic_inc_and_test(atomic_t *v)
758 instrument_atomic_read_write(v, sizeof(*v));
759 return arch_atomic_inc_and_test(v);
761 #define atomic_inc_and_test atomic_inc_and_test
764 #if defined(arch_atomic_add_negative)
765 static __always_inline bool
766 atomic_add_negative(int i, atomic_t *v)
768 instrument_atomic_read_write(v, sizeof(*v));
769 return arch_atomic_add_negative(i, v);
771 #define atomic_add_negative atomic_add_negative
774 #if defined(arch_atomic_fetch_add_unless)
775 static __always_inline int
776 atomic_fetch_add_unless(atomic_t *v, int a, int u)
778 instrument_atomic_read_write(v, sizeof(*v));
779 return arch_atomic_fetch_add_unless(v, a, u);
781 #define atomic_fetch_add_unless atomic_fetch_add_unless
784 #if defined(arch_atomic_add_unless)
785 static __always_inline bool
786 atomic_add_unless(atomic_t *v, int a, int u)
788 instrument_atomic_read_write(v, sizeof(*v));
789 return arch_atomic_add_unless(v, a, u);
791 #define atomic_add_unless atomic_add_unless
794 #if defined(arch_atomic_inc_not_zero)
795 static __always_inline bool
796 atomic_inc_not_zero(atomic_t *v)
798 instrument_atomic_read_write(v, sizeof(*v));
799 return arch_atomic_inc_not_zero(v);
801 #define atomic_inc_not_zero atomic_inc_not_zero
804 #if defined(arch_atomic_inc_unless_negative)
805 static __always_inline bool
806 atomic_inc_unless_negative(atomic_t *v)
808 instrument_atomic_read_write(v, sizeof(*v));
809 return arch_atomic_inc_unless_negative(v);
811 #define atomic_inc_unless_negative atomic_inc_unless_negative
814 #if defined(arch_atomic_dec_unless_positive)
815 static __always_inline bool
816 atomic_dec_unless_positive(atomic_t *v)
818 instrument_atomic_read_write(v, sizeof(*v));
819 return arch_atomic_dec_unless_positive(v);
821 #define atomic_dec_unless_positive atomic_dec_unless_positive
#if defined(arch_atomic_dec_if_positive)
static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_dec_if_positive(v);
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif
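/*
 * atomic64_t (s64) wrappers; these mirror the atomic_t wrappers above.
 */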
static __always_inline s64
atomic64_read(const atomic64_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}
#define atomic64_read atomic64_read

#if defined(arch_atomic64_read_acquire)
static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	instrument_atomic_read(v, sizeof(*v));
	return arch_atomic64_read_acquire(v);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif
852 static __always_inline void
853 atomic64_set(atomic64_t *v, s64 i)
855 instrument_atomic_write(v, sizeof(*v));
856 arch_atomic64_set(v, i);
858 #define atomic64_set atomic64_set
860 #if defined(arch_atomic64_set_release)
861 static __always_inline void
862 atomic64_set_release(atomic64_t *v, s64 i)
864 instrument_atomic_write(v, sizeof(*v));
865 arch_atomic64_set_release(v, i);
867 #define atomic64_set_release atomic64_set_release
870 static __always_inline void
871 atomic64_add(s64 i, atomic64_t *v)
873 instrument_atomic_read_write(v, sizeof(*v));
874 arch_atomic64_add(i, v);
876 #define atomic64_add atomic64_add
878 #if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
879 static __always_inline s64
880 atomic64_add_return(s64 i, atomic64_t *v)
882 instrument_atomic_read_write(v, sizeof(*v));
883 return arch_atomic64_add_return(i, v);
885 #define atomic64_add_return atomic64_add_return
888 #if defined(arch_atomic64_add_return_acquire)
889 static __always_inline s64
890 atomic64_add_return_acquire(s64 i, atomic64_t *v)
892 instrument_atomic_read_write(v, sizeof(*v));
893 return arch_atomic64_add_return_acquire(i, v);
895 #define atomic64_add_return_acquire atomic64_add_return_acquire
898 #if defined(arch_atomic64_add_return_release)
899 static __always_inline s64
900 atomic64_add_return_release(s64 i, atomic64_t *v)
902 instrument_atomic_read_write(v, sizeof(*v));
903 return arch_atomic64_add_return_release(i, v);
905 #define atomic64_add_return_release atomic64_add_return_release
908 #if defined(arch_atomic64_add_return_relaxed)
909 static __always_inline s64
910 atomic64_add_return_relaxed(s64 i, atomic64_t *v)
912 instrument_atomic_read_write(v, sizeof(*v));
913 return arch_atomic64_add_return_relaxed(i, v);
915 #define atomic64_add_return_relaxed atomic64_add_return_relaxed
918 #if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add)
919 static __always_inline s64
920 atomic64_fetch_add(s64 i, atomic64_t *v)
922 instrument_atomic_read_write(v, sizeof(*v));
923 return arch_atomic64_fetch_add(i, v);
925 #define atomic64_fetch_add atomic64_fetch_add
928 #if defined(arch_atomic64_fetch_add_acquire)
929 static __always_inline s64
930 atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
932 instrument_atomic_read_write(v, sizeof(*v));
933 return arch_atomic64_fetch_add_acquire(i, v);
935 #define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
938 #if defined(arch_atomic64_fetch_add_release)
939 static __always_inline s64
940 atomic64_fetch_add_release(s64 i, atomic64_t *v)
942 instrument_atomic_read_write(v, sizeof(*v));
943 return arch_atomic64_fetch_add_release(i, v);
945 #define atomic64_fetch_add_release atomic64_fetch_add_release
948 #if defined(arch_atomic64_fetch_add_relaxed)
949 static __always_inline s64
950 atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
952 instrument_atomic_read_write(v, sizeof(*v));
953 return arch_atomic64_fetch_add_relaxed(i, v);
955 #define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
958 static __always_inline void
959 atomic64_sub(s64 i, atomic64_t *v)
961 instrument_atomic_read_write(v, sizeof(*v));
962 arch_atomic64_sub(i, v);
964 #define atomic64_sub atomic64_sub
966 #if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return)
967 static __always_inline s64
968 atomic64_sub_return(s64 i, atomic64_t *v)
970 instrument_atomic_read_write(v, sizeof(*v));
971 return arch_atomic64_sub_return(i, v);
973 #define atomic64_sub_return atomic64_sub_return
976 #if defined(arch_atomic64_sub_return_acquire)
977 static __always_inline s64
978 atomic64_sub_return_acquire(s64 i, atomic64_t *v)
980 instrument_atomic_read_write(v, sizeof(*v));
981 return arch_atomic64_sub_return_acquire(i, v);
983 #define atomic64_sub_return_acquire atomic64_sub_return_acquire
986 #if defined(arch_atomic64_sub_return_release)
987 static __always_inline s64
988 atomic64_sub_return_release(s64 i, atomic64_t *v)
990 instrument_atomic_read_write(v, sizeof(*v));
991 return arch_atomic64_sub_return_release(i, v);
993 #define atomic64_sub_return_release atomic64_sub_return_release
996 #if defined(arch_atomic64_sub_return_relaxed)
997 static __always_inline s64
998 atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
1000 instrument_atomic_read_write(v, sizeof(*v));
1001 return arch_atomic64_sub_return_relaxed(i, v);
1003 #define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
1006 #if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub)
1007 static __always_inline s64
1008 atomic64_fetch_sub(s64 i, atomic64_t *v)
1010 instrument_atomic_read_write(v, sizeof(*v));
1011 return arch_atomic64_fetch_sub(i, v);
1013 #define atomic64_fetch_sub atomic64_fetch_sub
1016 #if defined(arch_atomic64_fetch_sub_acquire)
1017 static __always_inline s64
1018 atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
1020 instrument_atomic_read_write(v, sizeof(*v));
1021 return arch_atomic64_fetch_sub_acquire(i, v);
1023 #define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
1026 #if defined(arch_atomic64_fetch_sub_release)
1027 static __always_inline s64
1028 atomic64_fetch_sub_release(s64 i, atomic64_t *v)
1030 instrument_atomic_read_write(v, sizeof(*v));
1031 return arch_atomic64_fetch_sub_release(i, v);
1033 #define atomic64_fetch_sub_release atomic64_fetch_sub_release
1036 #if defined(arch_atomic64_fetch_sub_relaxed)
1037 static __always_inline s64
1038 atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
1040 instrument_atomic_read_write(v, sizeof(*v));
1041 return arch_atomic64_fetch_sub_relaxed(i, v);
1043 #define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
1046 #if defined(arch_atomic64_inc)
1047 static __always_inline void
1048 atomic64_inc(atomic64_t *v)
1050 instrument_atomic_read_write(v, sizeof(*v));
1051 arch_atomic64_inc(v);
1053 #define atomic64_inc atomic64_inc
1056 #if defined(arch_atomic64_inc_return)
1057 static __always_inline s64
1058 atomic64_inc_return(atomic64_t *v)
1060 instrument_atomic_read_write(v, sizeof(*v));
1061 return arch_atomic64_inc_return(v);
1063 #define atomic64_inc_return atomic64_inc_return
1066 #if defined(arch_atomic64_inc_return_acquire)
1067 static __always_inline s64
1068 atomic64_inc_return_acquire(atomic64_t *v)
1070 instrument_atomic_read_write(v, sizeof(*v));
1071 return arch_atomic64_inc_return_acquire(v);
1073 #define atomic64_inc_return_acquire atomic64_inc_return_acquire
1076 #if defined(arch_atomic64_inc_return_release)
1077 static __always_inline s64
1078 atomic64_inc_return_release(atomic64_t *v)
1080 instrument_atomic_read_write(v, sizeof(*v));
1081 return arch_atomic64_inc_return_release(v);
1083 #define atomic64_inc_return_release atomic64_inc_return_release
1086 #if defined(arch_atomic64_inc_return_relaxed)
1087 static __always_inline s64
1088 atomic64_inc_return_relaxed(atomic64_t *v)
1090 instrument_atomic_read_write(v, sizeof(*v));
1091 return arch_atomic64_inc_return_relaxed(v);
1093 #define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
1096 #if defined(arch_atomic64_fetch_inc)
1097 static __always_inline s64
1098 atomic64_fetch_inc(atomic64_t *v)
1100 instrument_atomic_read_write(v, sizeof(*v));
1101 return arch_atomic64_fetch_inc(v);
1103 #define atomic64_fetch_inc atomic64_fetch_inc
1106 #if defined(arch_atomic64_fetch_inc_acquire)
1107 static __always_inline s64
1108 atomic64_fetch_inc_acquire(atomic64_t *v)
1110 instrument_atomic_read_write(v, sizeof(*v));
1111 return arch_atomic64_fetch_inc_acquire(v);
1113 #define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
1116 #if defined(arch_atomic64_fetch_inc_release)
1117 static __always_inline s64
1118 atomic64_fetch_inc_release(atomic64_t *v)
1120 instrument_atomic_read_write(v, sizeof(*v));
1121 return arch_atomic64_fetch_inc_release(v);
1123 #define atomic64_fetch_inc_release atomic64_fetch_inc_release
1126 #if defined(arch_atomic64_fetch_inc_relaxed)
1127 static __always_inline s64
1128 atomic64_fetch_inc_relaxed(atomic64_t *v)
1130 instrument_atomic_read_write(v, sizeof(*v));
1131 return arch_atomic64_fetch_inc_relaxed(v);
1133 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
1136 #if defined(arch_atomic64_dec)
1137 static __always_inline void
1138 atomic64_dec(atomic64_t *v)
1140 instrument_atomic_read_write(v, sizeof(*v));
1141 arch_atomic64_dec(v);
1143 #define atomic64_dec atomic64_dec
1146 #if defined(arch_atomic64_dec_return)
1147 static __always_inline s64
1148 atomic64_dec_return(atomic64_t *v)
1150 instrument_atomic_read_write(v, sizeof(*v));
1151 return arch_atomic64_dec_return(v);
1153 #define atomic64_dec_return atomic64_dec_return
1156 #if defined(arch_atomic64_dec_return_acquire)
1157 static __always_inline s64
1158 atomic64_dec_return_acquire(atomic64_t *v)
1160 instrument_atomic_read_write(v, sizeof(*v));
1161 return arch_atomic64_dec_return_acquire(v);
1163 #define atomic64_dec_return_acquire atomic64_dec_return_acquire
1166 #if defined(arch_atomic64_dec_return_release)
1167 static __always_inline s64
1168 atomic64_dec_return_release(atomic64_t *v)
1170 instrument_atomic_read_write(v, sizeof(*v));
1171 return arch_atomic64_dec_return_release(v);
1173 #define atomic64_dec_return_release atomic64_dec_return_release
1176 #if defined(arch_atomic64_dec_return_relaxed)
1177 static __always_inline s64
1178 atomic64_dec_return_relaxed(atomic64_t *v)
1180 instrument_atomic_read_write(v, sizeof(*v));
1181 return arch_atomic64_dec_return_relaxed(v);
1183 #define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
1186 #if defined(arch_atomic64_fetch_dec)
1187 static __always_inline s64
1188 atomic64_fetch_dec(atomic64_t *v)
1190 instrument_atomic_read_write(v, sizeof(*v));
1191 return arch_atomic64_fetch_dec(v);
1193 #define atomic64_fetch_dec atomic64_fetch_dec
1196 #if defined(arch_atomic64_fetch_dec_acquire)
1197 static __always_inline s64
1198 atomic64_fetch_dec_acquire(atomic64_t *v)
1200 instrument_atomic_read_write(v, sizeof(*v));
1201 return arch_atomic64_fetch_dec_acquire(v);
1203 #define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
1206 #if defined(arch_atomic64_fetch_dec_release)
1207 static __always_inline s64
1208 atomic64_fetch_dec_release(atomic64_t *v)
1210 instrument_atomic_read_write(v, sizeof(*v));
1211 return arch_atomic64_fetch_dec_release(v);
1213 #define atomic64_fetch_dec_release atomic64_fetch_dec_release
1216 #if defined(arch_atomic64_fetch_dec_relaxed)
1217 static __always_inline s64
1218 atomic64_fetch_dec_relaxed(atomic64_t *v)
1220 instrument_atomic_read_write(v, sizeof(*v));
1221 return arch_atomic64_fetch_dec_relaxed(v);
1223 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
1226 static __always_inline void
1227 atomic64_and(s64 i, atomic64_t *v)
1229 instrument_atomic_read_write(v, sizeof(*v));
1230 arch_atomic64_and(i, v);
1232 #define atomic64_and atomic64_and
1234 #if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and)
1235 static __always_inline s64
1236 atomic64_fetch_and(s64 i, atomic64_t *v)
1238 instrument_atomic_read_write(v, sizeof(*v));
1239 return arch_atomic64_fetch_and(i, v);
1241 #define atomic64_fetch_and atomic64_fetch_and
1244 #if defined(arch_atomic64_fetch_and_acquire)
1245 static __always_inline s64
1246 atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
1248 instrument_atomic_read_write(v, sizeof(*v));
1249 return arch_atomic64_fetch_and_acquire(i, v);
1251 #define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
1254 #if defined(arch_atomic64_fetch_and_release)
1255 static __always_inline s64
1256 atomic64_fetch_and_release(s64 i, atomic64_t *v)
1258 instrument_atomic_read_write(v, sizeof(*v));
1259 return arch_atomic64_fetch_and_release(i, v);
1261 #define atomic64_fetch_and_release atomic64_fetch_and_release
1264 #if defined(arch_atomic64_fetch_and_relaxed)
1265 static __always_inline s64
1266 atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
1268 instrument_atomic_read_write(v, sizeof(*v));
1269 return arch_atomic64_fetch_and_relaxed(i, v);
1271 #define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
1274 #if defined(arch_atomic64_andnot)
1275 static __always_inline void
1276 atomic64_andnot(s64 i, atomic64_t *v)
1278 instrument_atomic_read_write(v, sizeof(*v));
1279 arch_atomic64_andnot(i, v);
1281 #define atomic64_andnot atomic64_andnot
1284 #if defined(arch_atomic64_fetch_andnot)
1285 static __always_inline s64
1286 atomic64_fetch_andnot(s64 i, atomic64_t *v)
1288 instrument_atomic_read_write(v, sizeof(*v));
1289 return arch_atomic64_fetch_andnot(i, v);
1291 #define atomic64_fetch_andnot atomic64_fetch_andnot
1294 #if defined(arch_atomic64_fetch_andnot_acquire)
1295 static __always_inline s64
1296 atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1298 instrument_atomic_read_write(v, sizeof(*v));
1299 return arch_atomic64_fetch_andnot_acquire(i, v);
1301 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
1304 #if defined(arch_atomic64_fetch_andnot_release)
1305 static __always_inline s64
1306 atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1308 instrument_atomic_read_write(v, sizeof(*v));
1309 return arch_atomic64_fetch_andnot_release(i, v);
1311 #define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
1314 #if defined(arch_atomic64_fetch_andnot_relaxed)
1315 static __always_inline s64
1316 atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
1318 instrument_atomic_read_write(v, sizeof(*v));
1319 return arch_atomic64_fetch_andnot_relaxed(i, v);
1321 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
1324 static __always_inline void
1325 atomic64_or(s64 i, atomic64_t *v)
1327 instrument_atomic_read_write(v, sizeof(*v));
1328 arch_atomic64_or(i, v);
1330 #define atomic64_or atomic64_or
1332 #if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or)
1333 static __always_inline s64
1334 atomic64_fetch_or(s64 i, atomic64_t *v)
1336 instrument_atomic_read_write(v, sizeof(*v));
1337 return arch_atomic64_fetch_or(i, v);
1339 #define atomic64_fetch_or atomic64_fetch_or
1342 #if defined(arch_atomic64_fetch_or_acquire)
1343 static __always_inline s64
1344 atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
1346 instrument_atomic_read_write(v, sizeof(*v));
1347 return arch_atomic64_fetch_or_acquire(i, v);
1349 #define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
1352 #if defined(arch_atomic64_fetch_or_release)
1353 static __always_inline s64
1354 atomic64_fetch_or_release(s64 i, atomic64_t *v)
1356 instrument_atomic_read_write(v, sizeof(*v));
1357 return arch_atomic64_fetch_or_release(i, v);
1359 #define atomic64_fetch_or_release atomic64_fetch_or_release
1362 #if defined(arch_atomic64_fetch_or_relaxed)
1363 static __always_inline s64
1364 atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
1366 instrument_atomic_read_write(v, sizeof(*v));
1367 return arch_atomic64_fetch_or_relaxed(i, v);
1369 #define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
1372 static __always_inline void
1373 atomic64_xor(s64 i, atomic64_t *v)
1375 instrument_atomic_read_write(v, sizeof(*v));
1376 arch_atomic64_xor(i, v);
1378 #define atomic64_xor atomic64_xor
1380 #if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor)
1381 static __always_inline s64
1382 atomic64_fetch_xor(s64 i, atomic64_t *v)
1384 instrument_atomic_read_write(v, sizeof(*v));
1385 return arch_atomic64_fetch_xor(i, v);
1387 #define atomic64_fetch_xor atomic64_fetch_xor
1390 #if defined(arch_atomic64_fetch_xor_acquire)
1391 static __always_inline s64
1392 atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
1394 instrument_atomic_read_write(v, sizeof(*v));
1395 return arch_atomic64_fetch_xor_acquire(i, v);
1397 #define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
1400 #if defined(arch_atomic64_fetch_xor_release)
1401 static __always_inline s64
1402 atomic64_fetch_xor_release(s64 i, atomic64_t *v)
1404 instrument_atomic_read_write(v, sizeof(*v));
1405 return arch_atomic64_fetch_xor_release(i, v);
1407 #define atomic64_fetch_xor_release atomic64_fetch_xor_release
1410 #if defined(arch_atomic64_fetch_xor_relaxed)
1411 static __always_inline s64
1412 atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
1414 instrument_atomic_read_write(v, sizeof(*v));
1415 return arch_atomic64_fetch_xor_relaxed(i, v);
1417 #define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
1420 #if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg)
1421 static __always_inline s64
1422 atomic64_xchg(atomic64_t *v, s64 i)
1424 instrument_atomic_read_write(v, sizeof(*v));
1425 return arch_atomic64_xchg(v, i);
1427 #define atomic64_xchg atomic64_xchg
1430 #if defined(arch_atomic64_xchg_acquire)
1431 static __always_inline s64
1432 atomic64_xchg_acquire(atomic64_t *v, s64 i)
1434 instrument_atomic_read_write(v, sizeof(*v));
1435 return arch_atomic64_xchg_acquire(v, i);
1437 #define atomic64_xchg_acquire atomic64_xchg_acquire
1440 #if defined(arch_atomic64_xchg_release)
1441 static __always_inline s64
1442 atomic64_xchg_release(atomic64_t *v, s64 i)
1444 instrument_atomic_read_write(v, sizeof(*v));
1445 return arch_atomic64_xchg_release(v, i);
1447 #define atomic64_xchg_release atomic64_xchg_release
1450 #if defined(arch_atomic64_xchg_relaxed)
1451 static __always_inline s64
1452 atomic64_xchg_relaxed(atomic64_t *v, s64 i)
1454 instrument_atomic_read_write(v, sizeof(*v));
1455 return arch_atomic64_xchg_relaxed(v, i);
1457 #define atomic64_xchg_relaxed atomic64_xchg_relaxed
1460 #if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg)
1461 static __always_inline s64
1462 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
1464 instrument_atomic_read_write(v, sizeof(*v));
1465 return arch_atomic64_cmpxchg(v, old, new);
1467 #define atomic64_cmpxchg atomic64_cmpxchg
1470 #if defined(arch_atomic64_cmpxchg_acquire)
1471 static __always_inline s64
1472 atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
1474 instrument_atomic_read_write(v, sizeof(*v));
1475 return arch_atomic64_cmpxchg_acquire(v, old, new);
1477 #define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
1480 #if defined(arch_atomic64_cmpxchg_release)
1481 static __always_inline s64
1482 atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
1484 instrument_atomic_read_write(v, sizeof(*v));
1485 return arch_atomic64_cmpxchg_release(v, old, new);
1487 #define atomic64_cmpxchg_release atomic64_cmpxchg_release
1490 #if defined(arch_atomic64_cmpxchg_relaxed)
1491 static __always_inline s64
1492 atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
1494 instrument_atomic_read_write(v, sizeof(*v));
1495 return arch_atomic64_cmpxchg_relaxed(v, old, new);
1497 #define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
1500 #if defined(arch_atomic64_try_cmpxchg)
1501 static __always_inline bool
1502 atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
1504 instrument_atomic_read_write(v, sizeof(*v));
1505 instrument_atomic_read_write(old, sizeof(*old));
1506 return arch_atomic64_try_cmpxchg(v, old, new);
1508 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
1511 #if defined(arch_atomic64_try_cmpxchg_acquire)
1512 static __always_inline bool
1513 atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
1515 instrument_atomic_read_write(v, sizeof(*v));
1516 instrument_atomic_read_write(old, sizeof(*old));
1517 return arch_atomic64_try_cmpxchg_acquire(v, old, new);
1519 #define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
1522 #if defined(arch_atomic64_try_cmpxchg_release)
1523 static __always_inline bool
1524 atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
1526 instrument_atomic_read_write(v, sizeof(*v));
1527 instrument_atomic_read_write(old, sizeof(*old));
1528 return arch_atomic64_try_cmpxchg_release(v, old, new);
1530 #define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
1533 #if defined(arch_atomic64_try_cmpxchg_relaxed)
1534 static __always_inline bool
1535 atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
1537 instrument_atomic_read_write(v, sizeof(*v));
1538 instrument_atomic_read_write(old, sizeof(*old));
1539 return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
1541 #define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
1544 #if defined(arch_atomic64_sub_and_test)
1545 static __always_inline bool
1546 atomic64_sub_and_test(s64 i, atomic64_t *v)
1548 instrument_atomic_read_write(v, sizeof(*v));
1549 return arch_atomic64_sub_and_test(i, v);
1551 #define atomic64_sub_and_test atomic64_sub_and_test
1554 #if defined(arch_atomic64_dec_and_test)
1555 static __always_inline bool
1556 atomic64_dec_and_test(atomic64_t *v)
1558 instrument_atomic_read_write(v, sizeof(*v));
1559 return arch_atomic64_dec_and_test(v);
1561 #define atomic64_dec_and_test atomic64_dec_and_test
1564 #if defined(arch_atomic64_inc_and_test)
1565 static __always_inline bool
1566 atomic64_inc_and_test(atomic64_t *v)
1568 instrument_atomic_read_write(v, sizeof(*v));
1569 return arch_atomic64_inc_and_test(v);
1571 #define atomic64_inc_and_test atomic64_inc_and_test
1574 #if defined(arch_atomic64_add_negative)
1575 static __always_inline bool
1576 atomic64_add_negative(s64 i, atomic64_t *v)
1578 instrument_atomic_read_write(v, sizeof(*v));
1579 return arch_atomic64_add_negative(i, v);
1581 #define atomic64_add_negative atomic64_add_negative
1584 #if defined(arch_atomic64_fetch_add_unless)
1585 static __always_inline s64
1586 atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
1588 instrument_atomic_read_write(v, sizeof(*v));
1589 return arch_atomic64_fetch_add_unless(v, a, u);
1591 #define atomic64_fetch_add_unless atomic64_fetch_add_unless
1594 #if defined(arch_atomic64_add_unless)
1595 static __always_inline bool
1596 atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
1598 instrument_atomic_read_write(v, sizeof(*v));
1599 return arch_atomic64_add_unless(v, a, u);
1601 #define atomic64_add_unless atomic64_add_unless
1604 #if defined(arch_atomic64_inc_not_zero)
1605 static __always_inline bool
1606 atomic64_inc_not_zero(atomic64_t *v)
1608 instrument_atomic_read_write(v, sizeof(*v));
1609 return arch_atomic64_inc_not_zero(v);
1611 #define atomic64_inc_not_zero atomic64_inc_not_zero
1614 #if defined(arch_atomic64_inc_unless_negative)
1615 static __always_inline bool
1616 atomic64_inc_unless_negative(atomic64_t *v)
1618 instrument_atomic_read_write(v, sizeof(*v));
1619 return arch_atomic64_inc_unless_negative(v);
1621 #define atomic64_inc_unless_negative atomic64_inc_unless_negative
1624 #if defined(arch_atomic64_dec_unless_positive)
1625 static __always_inline bool
1626 atomic64_dec_unless_positive(atomic64_t *v)
1628 instrument_atomic_read_write(v, sizeof(*v));
1629 return arch_atomic64_dec_unless_positive(v);
1631 #define atomic64_dec_unless_positive atomic64_dec_unless_positive
#if defined(arch_atomic64_dec_if_positive)
static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
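/*
 * The xchg()/cmpxchg() family below is wrapped as statement-expression
 * macros: the pointer argument is evaluated once into __ai_ptr, the access
 * is reported to the instrumentation, and the arch_ macro performs the
 * actual operation.
 */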
#if !defined(arch_xchg_relaxed) || defined(arch_xchg)
#define xchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_xchg_acquire)
#define xchg_acquire(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_release)
#define xchg_release(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_relaxed)
#define xchg_relaxed(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
#define cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg_acquire)
#define cmpxchg_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_release)
#define cmpxchg_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_relaxed)
#define cmpxchg_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
#define cmpxchg64(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg64_acquire)
#define cmpxchg64_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_release)
#define cmpxchg64_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_relaxed)
#define cmpxchg64_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if !defined(arch_try_cmpxchg_relaxed) || defined(arch_try_cmpxchg)
#define try_cmpxchg(ptr, oldp, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	typeof(oldp) __ai_oldp = (oldp);				\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp));	\
	arch_try_cmpxchg(__ai_ptr, __ai_oldp, __VA_ARGS__);		\
})
#endif

#if defined(arch_try_cmpxchg_acquire)
#define try_cmpxchg_acquire(ptr, oldp, ...)				\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	typeof(oldp) __ai_oldp = (oldp);				\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp));	\
	arch_try_cmpxchg_acquire(__ai_ptr, __ai_oldp, __VA_ARGS__);	\
})
#endif

#if defined(arch_try_cmpxchg_release)
#define try_cmpxchg_release(ptr, oldp, ...)				\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	typeof(oldp) __ai_oldp = (oldp);				\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp));	\
	arch_try_cmpxchg_release(__ai_ptr, __ai_oldp, __VA_ARGS__);	\
})
#endif

#if defined(arch_try_cmpxchg_relaxed)
#define try_cmpxchg_relaxed(ptr, oldp, ...)				\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	typeof(oldp) __ai_oldp = (oldp);				\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	instrument_atomic_write(__ai_oldp, sizeof(*__ai_oldp));	\
	arch_try_cmpxchg_relaxed(__ai_ptr, __ai_oldp, __VA_ARGS__);	\
})
#endif

#define cmpxchg_local(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_local(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg64_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__);			\
})

#define sync_cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__);			\
})
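/*
 * cmpxchg_double() operates on a pair of adjacent words, hence the
 * 2 * sizeof(*__ai_ptr) instrumentation size below.
 */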
#define cmpxchg_double(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr));	\
	arch_cmpxchg_double(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg_double_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	instrument_atomic_write(__ai_ptr, 2 * sizeof(*__ai_ptr));	\
	arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__);		\
})

#endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
// 4bec382e44520f4d8267e42620054db26a659ea3