/* SPDX-License-Identifier: GPL-2.0 */
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <linux/types.h>

#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */
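/*
 * Illustrative sketch (not part of this header): the same RMW operation in
 * each of the four orderings. The device structure, FLAG_BUSY bit and the
 * surrounding code are hypothetical.
 *
 *	int old;
 *
 *	old = atomic_fetch_or_relaxed(FLAG_BUSY, &dev->pending); // no ordering guarantee
 *	old = atomic_fetch_or_acquire(FLAG_BUSY, &dev->pending); // later accesses stay after
 *	old = atomic_fetch_or_release(FLAG_BUSY, &dev->pending); // earlier accesses stay before
 *	old = atomic_fetch_or(FLAG_BUSY, &dev->pending);         // fully ordered
 */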
#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
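/*
 * Illustrative sketch (not part of this header): one-way publication using
 * store-release/load-acquire. The payload variable and helper names are
 * hypothetical.
 *
 *	static int payload;
 *	static atomic_t ready = ATOMIC_INIT(0);
 *
 *	static void publish(int val)
 *	{
 *		payload = val;
 *		atomic_set_release(&ready, 1);		// payload store ordered before the flag
 *	}
 *
 *	static bool try_consume(int *val)
 *	{
 *		if (!atomic_read_acquire(&ready))	// pairs with the release above
 *			return false;
 *		*val = payload;				// guaranteed to see the published value
 *		return true;
 *	}
 */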
/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * If an architecture overrides __atomic_acquire_fence() it will probably
 * want to define smp_mb__after_spinlock().
 */
#ifndef __atomic_acquire_fence
#define __atomic_acquire_fence		smp_mb__after_atomic
#endif

#ifndef __atomic_release_fence
#define __atomic_release_fence		smp_mb__before_atomic
#endif

#ifndef __atomic_pre_full_fence
#define __atomic_pre_full_fence	smp_mb__before_atomic
#endif

#ifndef __atomic_post_full_fence
#define __atomic_post_full_fence	smp_mb__after_atomic
#endif
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	__atomic_acquire_fence();					\
	__ret;								\
})

#define __atomic_op_release(op, args...)				\
({									\
	__atomic_release_fence();					\
	op##_relaxed(args);						\
})

#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	__atomic_pre_full_fence();					\
	__ret = op##_relaxed(args);					\
	__atomic_post_full_fence();					\
	__ret;								\
})
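/*
 * Illustrative expansion (not part of this header): if an architecture only
 * provides atomic_add_return_relaxed(), the acquire form generated via
 * __atomic_op_acquire() behaves roughly like the sketch below. The function
 * name is hypothetical.
 *
 *	static inline int atomic_add_return_acquire_sketch(int i, atomic_t *v)
 *	{
 *		int ret = atomic_add_return_relaxed(i, v);
 *
 *		__atomic_acquire_fence();	// smp_mb__after_atomic() unless overridden
 *		return ret;
 *	}
 */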
82 /* atomic_add_return_relaxed */
83 #ifndef atomic_add_return_relaxed
84 #define atomic_add_return_relaxed atomic_add_return
85 #define atomic_add_return_acquire atomic_add_return
86 #define atomic_add_return_release atomic_add_return
88 #else /* atomic_add_return_relaxed */
90 #ifndef atomic_add_return_acquire
91 #define atomic_add_return_acquire(...) \
92 __atomic_op_acquire(atomic_add_return, __VA_ARGS__)
95 #ifndef atomic_add_return_release
96 #define atomic_add_return_release(...) \
97 __atomic_op_release(atomic_add_return, __VA_ARGS__)
100 #ifndef atomic_add_return
101 #define atomic_add_return(...) \
102 __atomic_op_fence(atomic_add_return, __VA_ARGS__)
104 #endif /* atomic_add_return_relaxed */
#ifndef atomic_inc
#define atomic_inc(v)			atomic_add(1, (v))
#endif
110 /* atomic_inc_return_relaxed */
111 #ifndef atomic_inc_return_relaxed
113 #ifndef atomic_inc_return
114 #define atomic_inc_return(v) atomic_add_return(1, (v))
115 #define atomic_inc_return_relaxed(v) atomic_add_return_relaxed(1, (v))
116 #define atomic_inc_return_acquire(v) atomic_add_return_acquire(1, (v))
117 #define atomic_inc_return_release(v) atomic_add_return_release(1, (v))
118 #else /* atomic_inc_return */
119 #define atomic_inc_return_relaxed atomic_inc_return
120 #define atomic_inc_return_acquire atomic_inc_return
121 #define atomic_inc_return_release atomic_inc_return
122 #endif /* atomic_inc_return */
124 #else /* atomic_inc_return_relaxed */
126 #ifndef atomic_inc_return_acquire
127 #define atomic_inc_return_acquire(...) \
128 __atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
131 #ifndef atomic_inc_return_release
132 #define atomic_inc_return_release(...) \
133 __atomic_op_release(atomic_inc_return, __VA_ARGS__)
136 #ifndef atomic_inc_return
137 #define atomic_inc_return(...) \
138 __atomic_op_fence(atomic_inc_return, __VA_ARGS__)
140 #endif /* atomic_inc_return_relaxed */
142 /* atomic_sub_return_relaxed */
143 #ifndef atomic_sub_return_relaxed
144 #define atomic_sub_return_relaxed atomic_sub_return
145 #define atomic_sub_return_acquire atomic_sub_return
146 #define atomic_sub_return_release atomic_sub_return
148 #else /* atomic_sub_return_relaxed */
150 #ifndef atomic_sub_return_acquire
151 #define atomic_sub_return_acquire(...) \
152 __atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
155 #ifndef atomic_sub_return_release
156 #define atomic_sub_return_release(...) \
157 __atomic_op_release(atomic_sub_return, __VA_ARGS__)
160 #ifndef atomic_sub_return
161 #define atomic_sub_return(...) \
162 __atomic_op_fence(atomic_sub_return, __VA_ARGS__)
164 #endif /* atomic_sub_return_relaxed */
#ifndef atomic_dec
#define atomic_dec(v)			atomic_sub(1, (v))
#endif
170 /* atomic_dec_return_relaxed */
171 #ifndef atomic_dec_return_relaxed
173 #ifndef atomic_dec_return
174 #define atomic_dec_return(v) atomic_sub_return(1, (v))
175 #define atomic_dec_return_relaxed(v) atomic_sub_return_relaxed(1, (v))
176 #define atomic_dec_return_acquire(v) atomic_sub_return_acquire(1, (v))
177 #define atomic_dec_return_release(v) atomic_sub_return_release(1, (v))
178 #else /* atomic_dec_return */
179 #define atomic_dec_return_relaxed atomic_dec_return
180 #define atomic_dec_return_acquire atomic_dec_return
181 #define atomic_dec_return_release atomic_dec_return
182 #endif /* atomic_dec_return */
184 #else /* atomic_dec_return_relaxed */
186 #ifndef atomic_dec_return_acquire
187 #define atomic_dec_return_acquire(...) \
188 __atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
191 #ifndef atomic_dec_return_release
192 #define atomic_dec_return_release(...) \
193 __atomic_op_release(atomic_dec_return, __VA_ARGS__)
196 #ifndef atomic_dec_return
197 #define atomic_dec_return(...) \
198 __atomic_op_fence(atomic_dec_return, __VA_ARGS__)
200 #endif /* atomic_dec_return_relaxed */
203 /* atomic_fetch_add_relaxed */
204 #ifndef atomic_fetch_add_relaxed
205 #define atomic_fetch_add_relaxed atomic_fetch_add
206 #define atomic_fetch_add_acquire atomic_fetch_add
207 #define atomic_fetch_add_release atomic_fetch_add
209 #else /* atomic_fetch_add_relaxed */
211 #ifndef atomic_fetch_add_acquire
212 #define atomic_fetch_add_acquire(...) \
213 __atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
216 #ifndef atomic_fetch_add_release
217 #define atomic_fetch_add_release(...) \
218 __atomic_op_release(atomic_fetch_add, __VA_ARGS__)
221 #ifndef atomic_fetch_add
222 #define atomic_fetch_add(...) \
223 __atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
225 #endif /* atomic_fetch_add_relaxed */
227 /* atomic_fetch_inc_relaxed */
228 #ifndef atomic_fetch_inc_relaxed
230 #ifndef atomic_fetch_inc
231 #define atomic_fetch_inc(v) atomic_fetch_add(1, (v))
232 #define atomic_fetch_inc_relaxed(v) atomic_fetch_add_relaxed(1, (v))
233 #define atomic_fetch_inc_acquire(v) atomic_fetch_add_acquire(1, (v))
234 #define atomic_fetch_inc_release(v) atomic_fetch_add_release(1, (v))
235 #else /* atomic_fetch_inc */
236 #define atomic_fetch_inc_relaxed atomic_fetch_inc
237 #define atomic_fetch_inc_acquire atomic_fetch_inc
238 #define atomic_fetch_inc_release atomic_fetch_inc
239 #endif /* atomic_fetch_inc */
241 #else /* atomic_fetch_inc_relaxed */
243 #ifndef atomic_fetch_inc_acquire
244 #define atomic_fetch_inc_acquire(...) \
245 __atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
248 #ifndef atomic_fetch_inc_release
249 #define atomic_fetch_inc_release(...) \
250 __atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
253 #ifndef atomic_fetch_inc
254 #define atomic_fetch_inc(...) \
255 __atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
257 #endif /* atomic_fetch_inc_relaxed */
259 /* atomic_fetch_sub_relaxed */
260 #ifndef atomic_fetch_sub_relaxed
261 #define atomic_fetch_sub_relaxed atomic_fetch_sub
262 #define atomic_fetch_sub_acquire atomic_fetch_sub
263 #define atomic_fetch_sub_release atomic_fetch_sub
265 #else /* atomic_fetch_sub_relaxed */
267 #ifndef atomic_fetch_sub_acquire
268 #define atomic_fetch_sub_acquire(...) \
269 __atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
272 #ifndef atomic_fetch_sub_release
273 #define atomic_fetch_sub_release(...) \
274 __atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
277 #ifndef atomic_fetch_sub
278 #define atomic_fetch_sub(...) \
279 __atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
281 #endif /* atomic_fetch_sub_relaxed */
283 /* atomic_fetch_dec_relaxed */
284 #ifndef atomic_fetch_dec_relaxed
286 #ifndef atomic_fetch_dec
287 #define atomic_fetch_dec(v) atomic_fetch_sub(1, (v))
288 #define atomic_fetch_dec_relaxed(v) atomic_fetch_sub_relaxed(1, (v))
289 #define atomic_fetch_dec_acquire(v) atomic_fetch_sub_acquire(1, (v))
290 #define atomic_fetch_dec_release(v) atomic_fetch_sub_release(1, (v))
291 #else /* atomic_fetch_dec */
292 #define atomic_fetch_dec_relaxed atomic_fetch_dec
293 #define atomic_fetch_dec_acquire atomic_fetch_dec
294 #define atomic_fetch_dec_release atomic_fetch_dec
295 #endif /* atomic_fetch_dec */
297 #else /* atomic_fetch_dec_relaxed */
299 #ifndef atomic_fetch_dec_acquire
300 #define atomic_fetch_dec_acquire(...) \
301 __atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
304 #ifndef atomic_fetch_dec_release
305 #define atomic_fetch_dec_release(...) \
306 __atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
309 #ifndef atomic_fetch_dec
310 #define atomic_fetch_dec(...) \
311 __atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
313 #endif /* atomic_fetch_dec_relaxed */
315 /* atomic_fetch_or_relaxed */
316 #ifndef atomic_fetch_or_relaxed
317 #define atomic_fetch_or_relaxed atomic_fetch_or
318 #define atomic_fetch_or_acquire atomic_fetch_or
319 #define atomic_fetch_or_release atomic_fetch_or
321 #else /* atomic_fetch_or_relaxed */
323 #ifndef atomic_fetch_or_acquire
324 #define atomic_fetch_or_acquire(...) \
325 __atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
328 #ifndef atomic_fetch_or_release
329 #define atomic_fetch_or_release(...) \
330 __atomic_op_release(atomic_fetch_or, __VA_ARGS__)
333 #ifndef atomic_fetch_or
334 #define atomic_fetch_or(...) \
335 __atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
337 #endif /* atomic_fetch_or_relaxed */
339 /* atomic_fetch_and_relaxed */
340 #ifndef atomic_fetch_and_relaxed
341 #define atomic_fetch_and_relaxed atomic_fetch_and
342 #define atomic_fetch_and_acquire atomic_fetch_and
343 #define atomic_fetch_and_release atomic_fetch_and
345 #else /* atomic_fetch_and_relaxed */
347 #ifndef atomic_fetch_and_acquire
348 #define atomic_fetch_and_acquire(...) \
349 __atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
352 #ifndef atomic_fetch_and_release
353 #define atomic_fetch_and_release(...) \
354 __atomic_op_release(atomic_fetch_and, __VA_ARGS__)
357 #ifndef atomic_fetch_and
358 #define atomic_fetch_and(...) \
359 __atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
361 #endif /* atomic_fetch_and_relaxed */
#ifndef atomic_andnot
#define atomic_andnot(i, v)		atomic_and(~(int)(i), (v))
#endif
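/*
 * Illustrative usage (not part of this header): atomic_andnot() atomically
 * clears the given bits. The flags word and mask below are hypothetical.
 *
 *	#define IRQ_PENDING	0x04
 *
 *	static atomic_t flags;
 *
 *	static void clear_pending(void)
 *	{
 *		atomic_andnot(IRQ_PENDING, &flags);	// flags &= ~IRQ_PENDING, atomically
 *	}
 */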
367 #ifndef atomic_fetch_andnot_relaxed
369 #ifndef atomic_fetch_andnot
370 #define atomic_fetch_andnot(i, v) atomic_fetch_and(~(int)(i), (v))
371 #define atomic_fetch_andnot_relaxed(i, v) atomic_fetch_and_relaxed(~(int)(i), (v))
372 #define atomic_fetch_andnot_acquire(i, v) atomic_fetch_and_acquire(~(int)(i), (v))
373 #define atomic_fetch_andnot_release(i, v) atomic_fetch_and_release(~(int)(i), (v))
374 #else /* atomic_fetch_andnot */
375 #define atomic_fetch_andnot_relaxed atomic_fetch_andnot
376 #define atomic_fetch_andnot_acquire atomic_fetch_andnot
377 #define atomic_fetch_andnot_release atomic_fetch_andnot
378 #endif /* atomic_fetch_andnot */
380 #else /* atomic_fetch_andnot_relaxed */
382 #ifndef atomic_fetch_andnot_acquire
383 #define atomic_fetch_andnot_acquire(...) \
384 __atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
387 #ifndef atomic_fetch_andnot_release
388 #define atomic_fetch_andnot_release(...) \
389 __atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
392 #ifndef atomic_fetch_andnot
393 #define atomic_fetch_andnot(...) \
394 __atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
396 #endif /* atomic_fetch_andnot_relaxed */
398 /* atomic_fetch_xor_relaxed */
399 #ifndef atomic_fetch_xor_relaxed
400 #define atomic_fetch_xor_relaxed atomic_fetch_xor
401 #define atomic_fetch_xor_acquire atomic_fetch_xor
402 #define atomic_fetch_xor_release atomic_fetch_xor
404 #else /* atomic_fetch_xor_relaxed */
406 #ifndef atomic_fetch_xor_acquire
407 #define atomic_fetch_xor_acquire(...) \
408 __atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
411 #ifndef atomic_fetch_xor_release
412 #define atomic_fetch_xor_release(...) \
413 __atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
416 #ifndef atomic_fetch_xor
417 #define atomic_fetch_xor(...) \
418 __atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
420 #endif /* atomic_fetch_xor_relaxed */
/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed		atomic_xchg
#define atomic_xchg_acquire		atomic_xchg
#define atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */
447 /* atomic_cmpxchg_relaxed */
448 #ifndef atomic_cmpxchg_relaxed
449 #define atomic_cmpxchg_relaxed atomic_cmpxchg
450 #define atomic_cmpxchg_acquire atomic_cmpxchg
451 #define atomic_cmpxchg_release atomic_cmpxchg
453 #else /* atomic_cmpxchg_relaxed */
455 #ifndef atomic_cmpxchg_acquire
456 #define atomic_cmpxchg_acquire(...) \
457 __atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
460 #ifndef atomic_cmpxchg_release
461 #define atomic_cmpxchg_release(...) \
462 __atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
465 #ifndef atomic_cmpxchg
466 #define atomic_cmpxchg(...) \
467 __atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
469 #endif /* atomic_cmpxchg_relaxed */
#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
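/*
 * Illustrative sketch (not part of this header): atomic_try_cmpxchg() updates
 * the expected-value argument on failure, which keeps compare-and-swap loops
 * short. The capped-increment helper below is hypothetical.
 *
 *	static bool add_capped(atomic_t *v, int limit)
 *	{
 *		int old = atomic_read(v);
 *
 *		do {
 *			if (old >= limit)
 *				return false;
 *		} while (!atomic_try_cmpxchg(v, &old, old + 1));	// reloads old on failure
 *
 *		return true;
 *	}
 */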
/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed			cmpxchg
#define cmpxchg_acquire			cmpxchg
#define cmpxchg_release			cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */
/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed		cmpxchg64
#define cmpxchg64_acquire		cmpxchg64
#define cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */
/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed			xchg
#define xchg_acquire			xchg
#define xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */
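/*
 * Illustrative sketch (not part of this header): a toy test-and-set lock
 * built from atomic_xchg_acquire() and atomic_set_release(). Real code
 * should use the kernel's locking primitives instead.
 *
 *	static atomic_t toy_lock_word = ATOMIC_INIT(0);
 *
 *	static void toy_lock(void)
 *	{
 *		while (atomic_xchg_acquire(&toy_lock_word, 1))	// pairs with the release below
 *			cpu_relax();
 *	}
 *
 *	static void toy_unlock(void)
 *	{
 *		atomic_set_release(&toy_lock_word, 0);
 *	}
 */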
/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic_fetch_add_unless
static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#endif
/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic_add_unless(atomic_t *v, int a, int u)
{
	return atomic_fetch_add_unless(v, a, u) != u;
}
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
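/*
 * Illustrative usage (not part of this header): take a reference only while
 * the object is still live. The object type and free path are hypothetical.
 *
 *	static bool obj_get(struct obj *o)
 *	{
 *		return atomic_inc_not_zero(&o->refs);	// fails once refs has dropped to zero
 *	}
 *
 *	static void obj_put(struct obj *o)
 *	{
 *		if (atomic_dec_and_test(&o->refs))
 *			kfree(o);
 *	}
 */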
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_inc_and_test
static inline bool atomic_inc_and_test(atomic_t *v)
{
	return atomic_inc_return(v) == 0;
}
#endif
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic_dec_and_test
static inline bool atomic_dec_and_test(atomic_t *v)
{
	return atomic_dec_return(v) == 0;
}
#endif
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_sub_and_test
static inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	return atomic_sub_return(i, v) == 0;
}
#endif
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#ifndef atomic_add_negative
static inline bool atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
#endif
#ifndef atomic_inc_unless_negative
static inline bool atomic_inc_unless_negative(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#endif
#ifndef atomic_dec_unless_positive
static inline bool atomic_dec_unless_positive(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#endif
/**
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#endif
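/*
 * Illustrative usage (not part of this header): consume one credit only if
 * one is available. The credit counter is hypothetical.
 *
 *	static atomic_t credits = ATOMIC_INIT(4);
 *
 *	static bool take_credit(void)
 *	{
 *		return atomic_dec_if_positive(&credits) >= 0;	// a negative result means none were left
 *	}
 */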
#define atomic_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))
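/*
 * Illustrative usage (not part of this header): spin until another CPU
 * publishes a non-zero state, taking the final read with acquire ordering.
 * The state variable is hypothetical; VAL names the value loaded by the
 * smp_cond_load_*() helpers.
 *
 *	static atomic_t state;
 *
 *	static int wait_for_state(void)
 *	{
 *		return atomic_cond_read_acquire(&state, VAL != 0);
 *	}
 */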
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif
#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
738 /* atomic64_add_return_relaxed */
739 #ifndef atomic64_add_return_relaxed
740 #define atomic64_add_return_relaxed atomic64_add_return
741 #define atomic64_add_return_acquire atomic64_add_return
742 #define atomic64_add_return_release atomic64_add_return
744 #else /* atomic64_add_return_relaxed */
746 #ifndef atomic64_add_return_acquire
747 #define atomic64_add_return_acquire(...) \
748 __atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
751 #ifndef atomic64_add_return_release
752 #define atomic64_add_return_release(...) \
753 __atomic_op_release(atomic64_add_return, __VA_ARGS__)
756 #ifndef atomic64_add_return
757 #define atomic64_add_return(...) \
758 __atomic_op_fence(atomic64_add_return, __VA_ARGS__)
760 #endif /* atomic64_add_return_relaxed */
#ifndef atomic64_inc
#define atomic64_inc(v)			atomic64_add(1, (v))
#endif
766 /* atomic64_inc_return_relaxed */
767 #ifndef atomic64_inc_return_relaxed
769 #ifndef atomic64_inc_return
770 #define atomic64_inc_return(v) atomic64_add_return(1, (v))
771 #define atomic64_inc_return_relaxed(v) atomic64_add_return_relaxed(1, (v))
772 #define atomic64_inc_return_acquire(v) atomic64_add_return_acquire(1, (v))
773 #define atomic64_inc_return_release(v) atomic64_add_return_release(1, (v))
774 #else /* atomic64_inc_return */
775 #define atomic64_inc_return_relaxed atomic64_inc_return
776 #define atomic64_inc_return_acquire atomic64_inc_return
777 #define atomic64_inc_return_release atomic64_inc_return
778 #endif /* atomic64_inc_return */
780 #else /* atomic64_inc_return_relaxed */
782 #ifndef atomic64_inc_return_acquire
783 #define atomic64_inc_return_acquire(...) \
784 __atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
787 #ifndef atomic64_inc_return_release
788 #define atomic64_inc_return_release(...) \
789 __atomic_op_release(atomic64_inc_return, __VA_ARGS__)
792 #ifndef atomic64_inc_return
793 #define atomic64_inc_return(...) \
794 __atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
796 #endif /* atomic64_inc_return_relaxed */
799 /* atomic64_sub_return_relaxed */
800 #ifndef atomic64_sub_return_relaxed
801 #define atomic64_sub_return_relaxed atomic64_sub_return
802 #define atomic64_sub_return_acquire atomic64_sub_return
803 #define atomic64_sub_return_release atomic64_sub_return
805 #else /* atomic64_sub_return_relaxed */
807 #ifndef atomic64_sub_return_acquire
808 #define atomic64_sub_return_acquire(...) \
809 __atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
812 #ifndef atomic64_sub_return_release
813 #define atomic64_sub_return_release(...) \
814 __atomic_op_release(atomic64_sub_return, __VA_ARGS__)
817 #ifndef atomic64_sub_return
818 #define atomic64_sub_return(...) \
819 __atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
821 #endif /* atomic64_sub_return_relaxed */
#ifndef atomic64_dec
#define atomic64_dec(v)			atomic64_sub(1, (v))
#endif
827 /* atomic64_dec_return_relaxed */
828 #ifndef atomic64_dec_return_relaxed
830 #ifndef atomic64_dec_return
831 #define atomic64_dec_return(v) atomic64_sub_return(1, (v))
832 #define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1, (v))
833 #define atomic64_dec_return_acquire(v) atomic64_sub_return_acquire(1, (v))
834 #define atomic64_dec_return_release(v) atomic64_sub_return_release(1, (v))
835 #else /* atomic64_dec_return */
836 #define atomic64_dec_return_relaxed atomic64_dec_return
837 #define atomic64_dec_return_acquire atomic64_dec_return
838 #define atomic64_dec_return_release atomic64_dec_return
839 #endif /* atomic64_dec_return */
841 #else /* atomic64_dec_return_relaxed */
843 #ifndef atomic64_dec_return_acquire
844 #define atomic64_dec_return_acquire(...) \
845 __atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
848 #ifndef atomic64_dec_return_release
849 #define atomic64_dec_return_release(...) \
850 __atomic_op_release(atomic64_dec_return, __VA_ARGS__)
853 #ifndef atomic64_dec_return
854 #define atomic64_dec_return(...) \
855 __atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
857 #endif /* atomic64_dec_return_relaxed */
860 /* atomic64_fetch_add_relaxed */
861 #ifndef atomic64_fetch_add_relaxed
862 #define atomic64_fetch_add_relaxed atomic64_fetch_add
863 #define atomic64_fetch_add_acquire atomic64_fetch_add
864 #define atomic64_fetch_add_release atomic64_fetch_add
866 #else /* atomic64_fetch_add_relaxed */
868 #ifndef atomic64_fetch_add_acquire
869 #define atomic64_fetch_add_acquire(...) \
870 __atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
873 #ifndef atomic64_fetch_add_release
874 #define atomic64_fetch_add_release(...) \
875 __atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
878 #ifndef atomic64_fetch_add
879 #define atomic64_fetch_add(...) \
880 __atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
882 #endif /* atomic64_fetch_add_relaxed */
884 /* atomic64_fetch_inc_relaxed */
885 #ifndef atomic64_fetch_inc_relaxed
887 #ifndef atomic64_fetch_inc
888 #define atomic64_fetch_inc(v) atomic64_fetch_add(1, (v))
889 #define atomic64_fetch_inc_relaxed(v) atomic64_fetch_add_relaxed(1, (v))
890 #define atomic64_fetch_inc_acquire(v) atomic64_fetch_add_acquire(1, (v))
891 #define atomic64_fetch_inc_release(v) atomic64_fetch_add_release(1, (v))
892 #else /* atomic64_fetch_inc */
893 #define atomic64_fetch_inc_relaxed atomic64_fetch_inc
894 #define atomic64_fetch_inc_acquire atomic64_fetch_inc
895 #define atomic64_fetch_inc_release atomic64_fetch_inc
896 #endif /* atomic64_fetch_inc */
898 #else /* atomic64_fetch_inc_relaxed */
900 #ifndef atomic64_fetch_inc_acquire
901 #define atomic64_fetch_inc_acquire(...) \
902 __atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
905 #ifndef atomic64_fetch_inc_release
906 #define atomic64_fetch_inc_release(...) \
907 __atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
910 #ifndef atomic64_fetch_inc
911 #define atomic64_fetch_inc(...) \
912 __atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
914 #endif /* atomic64_fetch_inc_relaxed */
916 /* atomic64_fetch_sub_relaxed */
917 #ifndef atomic64_fetch_sub_relaxed
918 #define atomic64_fetch_sub_relaxed atomic64_fetch_sub
919 #define atomic64_fetch_sub_acquire atomic64_fetch_sub
920 #define atomic64_fetch_sub_release atomic64_fetch_sub
922 #else /* atomic64_fetch_sub_relaxed */
924 #ifndef atomic64_fetch_sub_acquire
925 #define atomic64_fetch_sub_acquire(...) \
926 __atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
929 #ifndef atomic64_fetch_sub_release
930 #define atomic64_fetch_sub_release(...) \
931 __atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
934 #ifndef atomic64_fetch_sub
935 #define atomic64_fetch_sub(...) \
936 __atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
938 #endif /* atomic64_fetch_sub_relaxed */
940 /* atomic64_fetch_dec_relaxed */
941 #ifndef atomic64_fetch_dec_relaxed
943 #ifndef atomic64_fetch_dec
944 #define atomic64_fetch_dec(v) atomic64_fetch_sub(1, (v))
945 #define atomic64_fetch_dec_relaxed(v) atomic64_fetch_sub_relaxed(1, (v))
946 #define atomic64_fetch_dec_acquire(v) atomic64_fetch_sub_acquire(1, (v))
947 #define atomic64_fetch_dec_release(v) atomic64_fetch_sub_release(1, (v))
948 #else /* atomic64_fetch_dec */
949 #define atomic64_fetch_dec_relaxed atomic64_fetch_dec
950 #define atomic64_fetch_dec_acquire atomic64_fetch_dec
951 #define atomic64_fetch_dec_release atomic64_fetch_dec
952 #endif /* atomic64_fetch_dec */
954 #else /* atomic64_fetch_dec_relaxed */
956 #ifndef atomic64_fetch_dec_acquire
957 #define atomic64_fetch_dec_acquire(...) \
958 __atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
961 #ifndef atomic64_fetch_dec_release
962 #define atomic64_fetch_dec_release(...) \
963 __atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
966 #ifndef atomic64_fetch_dec
967 #define atomic64_fetch_dec(...) \
968 __atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
970 #endif /* atomic64_fetch_dec_relaxed */
972 /* atomic64_fetch_or_relaxed */
973 #ifndef atomic64_fetch_or_relaxed
974 #define atomic64_fetch_or_relaxed atomic64_fetch_or
975 #define atomic64_fetch_or_acquire atomic64_fetch_or
976 #define atomic64_fetch_or_release atomic64_fetch_or
978 #else /* atomic64_fetch_or_relaxed */
980 #ifndef atomic64_fetch_or_acquire
981 #define atomic64_fetch_or_acquire(...) \
982 __atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
985 #ifndef atomic64_fetch_or_release
986 #define atomic64_fetch_or_release(...) \
987 __atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
990 #ifndef atomic64_fetch_or
991 #define atomic64_fetch_or(...) \
992 __atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
994 #endif /* atomic64_fetch_or_relaxed */
996 /* atomic64_fetch_and_relaxed */
997 #ifndef atomic64_fetch_and_relaxed
998 #define atomic64_fetch_and_relaxed atomic64_fetch_and
999 #define atomic64_fetch_and_acquire atomic64_fetch_and
1000 #define atomic64_fetch_and_release atomic64_fetch_and
1002 #else /* atomic64_fetch_and_relaxed */
1004 #ifndef atomic64_fetch_and_acquire
1005 #define atomic64_fetch_and_acquire(...) \
1006 __atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
1009 #ifndef atomic64_fetch_and_release
1010 #define atomic64_fetch_and_release(...) \
1011 __atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
1014 #ifndef atomic64_fetch_and
1015 #define atomic64_fetch_and(...) \
1016 __atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
1018 #endif /* atomic64_fetch_and_relaxed */
1020 #ifndef atomic64_andnot
1021 #define atomic64_andnot(i, v) atomic64_and(~(long long)(i), (v))
1024 #ifndef atomic64_fetch_andnot_relaxed
1026 #ifndef atomic64_fetch_andnot
1027 #define atomic64_fetch_andnot(i, v) atomic64_fetch_and(~(long long)(i), (v))
1028 #define atomic64_fetch_andnot_relaxed(i, v) atomic64_fetch_and_relaxed(~(long long)(i), (v))
1029 #define atomic64_fetch_andnot_acquire(i, v) atomic64_fetch_and_acquire(~(long long)(i), (v))
1030 #define atomic64_fetch_andnot_release(i, v) atomic64_fetch_and_release(~(long long)(i), (v))
1031 #else /* atomic64_fetch_andnot */
1032 #define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
1033 #define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
1034 #define atomic64_fetch_andnot_release atomic64_fetch_andnot
1035 #endif /* atomic64_fetch_andnot */
1037 #else /* atomic64_fetch_andnot_relaxed */
1039 #ifndef atomic64_fetch_andnot_acquire
1040 #define atomic64_fetch_andnot_acquire(...) \
1041 __atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
1044 #ifndef atomic64_fetch_andnot_release
1045 #define atomic64_fetch_andnot_release(...) \
1046 __atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
1049 #ifndef atomic64_fetch_andnot
1050 #define atomic64_fetch_andnot(...) \
1051 __atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
1053 #endif /* atomic64_fetch_andnot_relaxed */
1055 /* atomic64_fetch_xor_relaxed */
1056 #ifndef atomic64_fetch_xor_relaxed
1057 #define atomic64_fetch_xor_relaxed atomic64_fetch_xor
1058 #define atomic64_fetch_xor_acquire atomic64_fetch_xor
1059 #define atomic64_fetch_xor_release atomic64_fetch_xor
1061 #else /* atomic64_fetch_xor_relaxed */
1063 #ifndef atomic64_fetch_xor_acquire
1064 #define atomic64_fetch_xor_acquire(...) \
1065 __atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
1068 #ifndef atomic64_fetch_xor_release
1069 #define atomic64_fetch_xor_release(...) \
1070 __atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
1073 #ifndef atomic64_fetch_xor
1074 #define atomic64_fetch_xor(...) \
1075 __atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
1077 #endif /* atomic64_fetch_xor_relaxed */
1080 /* atomic64_xchg_relaxed */
1081 #ifndef atomic64_xchg_relaxed
1082 #define atomic64_xchg_relaxed atomic64_xchg
1083 #define atomic64_xchg_acquire atomic64_xchg
1084 #define atomic64_xchg_release atomic64_xchg
1086 #else /* atomic64_xchg_relaxed */
1088 #ifndef atomic64_xchg_acquire
1089 #define atomic64_xchg_acquire(...) \
1090 __atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
1093 #ifndef atomic64_xchg_release
1094 #define atomic64_xchg_release(...) \
1095 __atomic_op_release(atomic64_xchg, __VA_ARGS__)
1098 #ifndef atomic64_xchg
1099 #define atomic64_xchg(...) \
1100 __atomic_op_fence(atomic64_xchg, __VA_ARGS__)
1102 #endif /* atomic64_xchg_relaxed */
1104 /* atomic64_cmpxchg_relaxed */
1105 #ifndef atomic64_cmpxchg_relaxed
1106 #define atomic64_cmpxchg_relaxed atomic64_cmpxchg
1107 #define atomic64_cmpxchg_acquire atomic64_cmpxchg
1108 #define atomic64_cmpxchg_release atomic64_cmpxchg
1110 #else /* atomic64_cmpxchg_relaxed */
1112 #ifndef atomic64_cmpxchg_acquire
1113 #define atomic64_cmpxchg_acquire(...) \
1114 __atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
1117 #ifndef atomic64_cmpxchg_release
1118 #define atomic64_cmpxchg_release(...) \
1119 __atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
1122 #ifndef atomic64_cmpxchg
1123 #define atomic64_cmpxchg(...) \
1124 __atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
1126 #endif /* atomic64_cmpxchg_relaxed */
#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)	__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */
/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic64_fetch_add_unless
static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
						  long long u)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#endif
/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	return atomic64_fetch_add_unless(v, a, u) != u;
}
/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic64_inc_not_zero
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
#endif
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_inc_and_test
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}
#endif
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic64_dec_and_test
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
#endif
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_sub_and_test
static inline bool atomic64_sub_and_test(long long i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}
#endif
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#ifndef atomic64_add_negative
static inline bool atomic64_add_negative(long long i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}
#endif
#ifndef atomic64_inc_unless_negative
static inline bool atomic64_inc_unless_negative(atomic64_t *v)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#endif
#ifndef atomic64_dec_unless_positive
static inline bool atomic64_dec_unless_positive(atomic64_t *v)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#endif
/**
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic64 variable, v, was not decremented.
 */
#ifndef atomic64_dec_if_positive
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long dec, c = atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#endif
1312 #define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
1313 #define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
1315 #include <asm-generic/atomic-long.h>
1317 #endif /* _LINUX_ATOMIC_H */