/* atomic.h: Thankfully the V9 is at least reasonable for this
 *           stuff.
 *
 * Copyright (C) 1996, 1997, 2000 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/types.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i) { (i) }
#define ATOMIC64_INIT(i) { (i) }

#define atomic_read(v) (*(volatile int *)&(v)->counter)
#define atomic64_read(v) (*(volatile long *)&(v)->counter)

#define atomic_set(v, i) (((v)->counter) = i)
#define atomic64_set(v, i) (((v)->counter) = i)
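
/*
 * Note: atomic_read() casts through volatile so that every use
 * performs a real load, and atomic_set() is a plain store.  Naturally
 * aligned 32- and 64-bit accesses are atomic on V9, so neither needs
 * special instructions.
 */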

extern void atomic_add(int, atomic_t *);
extern void atomic64_add(long, atomic64_t *);
extern void atomic_sub(int, atomic_t *);
extern void atomic64_sub(long, atomic64_t *);

extern int atomic_add_ret(int, atomic_t *);
extern long atomic64_add_ret(long, atomic64_t *);
extern int atomic_sub_ret(int, atomic_t *);
extern long atomic64_sub_ret(long, atomic64_t *);
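
/*
 * These routines are implemented out of line in sparc64 assembler
 * (arch/sparc/lib/atomic_64.S in this tree) as compare-and-swap
 * loops.
 */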

#define atomic_dec_return(v) atomic_sub_ret(1, v)
#define atomic64_dec_return(v) atomic64_sub_ret(1, v)

#define atomic_inc_return(v) atomic_add_ret(1, v)
#define atomic64_inc_return(v) atomic64_add_ret(1, v)

#define atomic_sub_return(i, v) atomic_sub_ret(i, v)
#define atomic64_sub_return(i, v) atomic64_sub_ret(i, v)

#define atomic_add_return(i, v) atomic_add_ret(i, v)
#define atomic64_add_return(i, v) atomic64_add_ret(i, v)
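
/*
 * All of the _ret()/_return() variants hand back the value of the
 * counter after the operation, e.g. atomic_inc_return() on a counter
 * at zero returns 1.
 */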

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

#define atomic_sub_and_test(i, v) (atomic_sub_ret(i, v) == 0)
#define atomic64_sub_and_test(i, v) (atomic64_sub_ret(i, v) == 0)

#define atomic_dec_and_test(v) (atomic_sub_ret(1, v) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_ret(1, v) == 0)
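
/*
 * Illustrative use (hypothetical 'obj' with an atomic_t 'refcnt'
 * member): the classic reference-drop pattern, where whoever drops
 * the last reference frees the object:
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		kfree(obj);
 */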

#define atomic_inc(v) atomic_add(1, v)
#define atomic64_inc(v) atomic64_add(1, v)

#define atomic_dec(v) atomic_sub(1, v)
#define atomic64_dec(v) atomic64_sub(1, v)

#define atomic_add_negative(i, v) (atomic_add_ret(i, v) < 0)
#define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
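
/*
 * cmpxchg() returns the value the location held before the operation,
 * so the swap succeeded iff the return value equals the expected
 * value 'o'.  __atomic_add_unless() below is the canonical user.
 */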

/*
 * Atomically add 'a' to 'v' unless it currently holds 'u'; returns
 * the value 'v' held beforehand.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		/* Counter hit the forbidden value; do not add. */
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		/* Nobody raced with us; the add is done. */
		if (likely(old == c))
			break;
		/* Lost a race; retry with the value we observed. */
		c = old;
	}
	return c;
}
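
/*
 * Illustrative use (hypothetical 'foo' with an atomic_t 'refcnt'
 * member): take a new reference only if the object is not already on
 * its way to being freed.  A return of zero here means the count was
 * already zero and no reference was taken:
 *
 *	if (__atomic_add_unless(&foo->refcnt, 1, 0) == 0)
 *		return NULL;
 */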

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/*
 * Same loop for the 64-bit flavor, but note the different return
 * convention: non-zero iff the add was actually performed.
 */
static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec() barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc() barrier()
#define smp_mb__after_atomic_inc() barrier()

#endif /* !(__ARCH_SPARC64_ATOMIC__) */