1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_GENERIC_BITOPS_ATOMIC_H_
3 #define _ASM_GENERIC_BITOPS_ATOMIC_H_
5 #include <linux/atomic.h>
6 #include <linux/compiler.h>
7 #include <asm/barrier.h>
/*
 * Implementation of atomic bitops using atomic-fetch ops.
 * See Documentation/atomic_bitops.txt for details.
 */
14 static __always_inline void
15 arch_set_bit(unsigned int nr, volatile unsigned long *p)
18 arch_atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p);
21 static __always_inline void
22 arch_clear_bit(unsigned int nr, volatile unsigned long *p)
25 arch_atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p);
28 static __always_inline void
29 arch_change_bit(unsigned int nr, volatile unsigned long *p)
32 arch_atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
35 static __always_inline int
36 arch_test_and_set_bit(unsigned int nr, volatile unsigned long *p)
39 unsigned long mask = BIT_MASK(nr);
42 old = arch_atomic_long_fetch_or(mask, (atomic_long_t *)p);
43 return !!(old & mask);
46 static __always_inline int
47 arch_test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
50 unsigned long mask = BIT_MASK(nr);
53 old = arch_atomic_long_fetch_andnot(mask, (atomic_long_t *)p);
54 return !!(old & mask);
57 static __always_inline int
58 arch_test_and_change_bit(unsigned int nr, volatile unsigned long *p)
61 unsigned long mask = BIT_MASK(nr);
64 old = arch_atomic_long_fetch_xor(mask, (atomic_long_t *)p);
65 return !!(old & mask);
68 #include <asm-generic/bitops/instrumented-atomic.h>
#endif /* _ASM_GENERIC_BITOPS_ATOMIC_H_ */