/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>
/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif
#define ATOMIC_INIT(i)	  { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	WRITE_ONCE((v)->counter, (i))
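
/*
 * Usage sketch (illustrative only, not part of this header): simple
 * resource counting with the operations defined in this file. The
 * variable name nr_users is hypothetical.
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 0);
 *	atomic_add(1, &nr_users);
 *	pr_info("users: %d\n", atomic_read(&nr_users));
 */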
#define ATOMIC_OP(op, c_op, asm_op)					      \
static __inline__ void atomic_##op(int i, atomic_t * v)		      \
{									      \
	if (kernel_uses_llsc) {						      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	"MIPS_ISA_LEVEL"			\n"   \
		"1:	ll	%0, %1		# atomic_" #op "	\n"   \
		"	" #asm_op " %0, %2				\n"   \
		"	sc	%0, %1					\n"   \
		"\t" __scbeqz "	%0, 1b					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	      \
		: "Ir" (i));						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
}
#define ATOMIC_OP_RETURN(op, c_op, asm_op)				      \
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v)      \
{									      \
	int result;							      \
									      \
	if (kernel_uses_llsc) {						      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	"MIPS_ISA_LEVEL"			\n"   \
		"1:	ll	%1, %2		# atomic_" #op "_return	\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	sc	%0, %2					\n"   \
		"\t" __scbeqz "	%0, 1b					\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		result c_op i;						      \
		v->counter = result;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	return result;							      \
}
#define ATOMIC_FETCH_OP(op, c_op, asm_op)				      \
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v)	      \
{									      \
	int result;							      \
									      \
	if (kernel_uses_llsc) {						      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	"MIPS_ISA_LEVEL"			\n"   \
		"1:	ll	%1, %2		# atomic_fetch_" #op "	\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	sc	%0, %2					\n"   \
		"\t" __scbeqz "	%0, 1b					\n"   \
		"	move	%0, %1					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	return result;							      \
}
#define ATOMIC_OPS(op, c_op, asm_op)					      \
	ATOMIC_OP(op, c_op, asm_op)					      \
	ATOMIC_OP_RETURN(op, c_op, asm_op)				      \
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#define atomic_add_return_relaxed	atomic_add_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub_relaxed
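
/*
 * For reference: each ATOMIC_OPS() expansion above generates three
 * functions; ATOMIC_OPS(add, +=, addu), for example, yields atomic_add(),
 * atomic_add_return_relaxed() and atomic_fetch_add_relaxed(). The fully
 * ordered atomic_add_return()/atomic_fetch_add() wrappers are derived
 * from the _relaxed variants by <linux/atomic.h>, so callers can simply
 * write (variable name illustrative):
 *
 *	old = atomic_fetch_add(1, &refcnt);
 */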
#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op)					      \
	ATOMIC_OP(op, c_op, asm_op)					      \
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

#define atomic_fetch_and_relaxed	atomic_fetch_and_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor_relaxed
#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	.set	mips0					\n"
		"	subu	%0, %1, %3				\n"
		"	move	%1, %0					\n"
		"	bltz	%0, 1f					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	sc	%1, %2					\n"
		"\t" __scbeqz "	%1, 1b					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
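
/*
 * Usage sketch (illustrative only): a typical compare-and-swap retry
 * loop built from atomic_read() and atomic_cmpxchg(). The variable
 * names are hypothetical.
 *
 *	static atomic_t val = ATOMIC_INIT(1);
 *	int old, new;
 *
 *	do {
 *		old = atomic_read(&val);
 *		new = old * 2;
 *	} while (atomic_cmpxchg(&val, old, new) != old);
 */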
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
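
/*
 * Usage sketch (illustrative only): drop a reference only while the
 * count stays positive; a negative return value means the counter was
 * already zero and was left unchanged. The name nr_users is hypothetical.
 *
 *	if (atomic_dec_if_positive(&nr_users) < 0)
 *		return -EBUSY;
 */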
#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	WRITE_ONCE((v)->counter, (i))
#define ATOMIC64_OP(op, c_op, asm_op)					      \
static __inline__ void atomic64_##op(long i, atomic64_t * v)		      \
{									      \
	if (kernel_uses_llsc) {						      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	"MIPS_ISA_LEVEL"			\n"   \
		"1:	lld	%0, %1		# atomic64_" #op "	\n"   \
		"	" #asm_op " %0, %2				\n"   \
		"	scd	%0, %1					\n"   \
		"\t" __scbeqz "	%0, 1b					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	      \
		: "Ir" (i));						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
}
#define ATOMIC64_OP_RETURN(op, c_op, asm_op)				      \
static __inline__ long atomic64_##op##_return_relaxed(long i, atomic64_t * v)\
{									      \
	long result;							      \
									      \
	if (kernel_uses_llsc) {						      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	"MIPS_ISA_LEVEL"			\n"   \
		"1:	lld	%1, %2		# atomic64_" #op "_return\n"  \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	scd	%0, %2					\n"   \
		"\t" __scbeqz "	%0, 1b					\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		result c_op i;						      \
		v->counter = result;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	return result;							      \
}
#define ATOMIC64_FETCH_OP(op, c_op, asm_op)				      \
static __inline__ long atomic64_fetch_##op##_relaxed(long i, atomic64_t * v) \
{									      \
	long result;							      \
									      \
	if (kernel_uses_llsc) {						      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	"MIPS_ISA_LEVEL"			\n"   \
		"1:	lld	%1, %2		# atomic64_fetch_" #op "\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	scd	%0, %2					\n"   \
		"\t" __scbeqz "	%0, 1b					\n"   \
		"	move	%0, %1					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	return result;							      \
}
#define ATOMIC64_OPS(op, c_op, asm_op)					      \
	ATOMIC64_OP(op, c_op, asm_op)					      \
	ATOMIC64_OP_RETURN(op, c_op, asm_op)				      \
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#define atomic64_add_return_relaxed	atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub_relaxed
#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op, asm_op)					      \
	ATOMIC64_OP(op, c_op, asm_op)					      \
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)

#define atomic64_fetch_and_relaxed	atomic64_fetch_and_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor_relaxed
#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	move	%1, %0					\n"
		"	bltz	%0, 1f					\n"
		"	scd	%1, %2					\n"
		"\t" __scbeqz "	%1, 1b					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */