ATOMIC_OP(atomic_and)
ATOMIC_OP(atomic_sub)
+#undef ATOMIC_OP
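Each of these generator macros exists only to stamp out a family of arch_atomic*() wrappers; once the last instantiation has been emitted the macro has no further use, so the patch #undefs it rather than leaving it visible to every file that includes the header. A minimal, self-contained sketch of the same generate-then-#undef idiom in plain C11 (GEN_FETCH_OP and its <stdatomic.h> body are stand-ins, not the kernel's __lse_ll_sc_body() dispatch):

    #include <stdatomic.h>

    /* Generator: one wrapper function per operation. */
    #define GEN_FETCH_OP(op)                                         \
    static inline int arch_fetch_##op(int i, atomic_int *v)          \
    {                                                                \
            return atomic_fetch_##op(v, i);                          \
    }

    GEN_FETCH_OP(add)            /* emits arch_fetch_add() */
    GEN_FETCH_OP(sub)            /* emits arch_fetch_sub() */

    #undef GEN_FETCH_OP          /* all wrappers emitted; keep the macro
                                    out of every includer's namespace  */
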
#define ATOMIC_FETCH_OP(name, op) \
static inline int arch_##op##name(int i, atomic_t *v) \
ATOMIC_FETCH_OPS(atomic_add_return)
ATOMIC_FETCH_OPS(atomic_sub_return)
+#undef ATOMIC_FETCH_OP
+#undef ATOMIC_FETCH_OPS
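The pair of #undefs here reflects a two-level expansion: ATOMIC_FETCH_OPS (its definition is not shown in this excerpt) invokes ATOMIC_FETCH_OP once per ordering variant (_relaxed, _acquire, _release and the fully ordered form), so both generator names need to be retired once the wrappers exist. A rough stand-alone sketch of that fan-out, with made-up GEN_ONE/GEN_ALL names and empty bodies in place of the real __lse_ll_sc_body() dispatch:

    #define GEN_ONE(ord, op)                                         \
    static inline void op##ord(void) { }

    #define GEN_ALL(op)                                              \
            GEN_ONE(_relaxed, op)                                    \
            GEN_ONE(_acquire, op)                                    \
            GEN_ONE(_release, op)                                    \
            GEN_ONE(        , op)

    GEN_ALL(my_add_return)       /* my_add_return_relaxed(), _acquire(),
                                    _release() and my_add_return()    */

    #undef GEN_ONE               /* both levels are expansion-time only, */
    #undef GEN_ALL               /* so both are undefined after use      */
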
#define ATOMIC64_OP(op) \
static inline void arch_##op(long i, atomic64_t *v) \
ATOMIC64_OP(atomic64_and)
ATOMIC64_OP(atomic64_sub)
+#undef ATOMIC64_OP
#define ATOMIC64_FETCH_OP(name, op) \
static inline long arch_##op##name(long i, atomic64_t *v) \
ATOMIC64_FETCH_OPS(atomic64_add_return)
ATOMIC64_FETCH_OPS(atomic64_sub_return)
+#undef ATOMIC64_FETCH_OP
+#undef ATOMIC64_FETCH_OPS
+
static inline long arch_atomic64_dec_if_positive(atomic64_t *v)
{
return __lse_ll_sc_body(atomic64_dec_if_positive, v);
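The hunks that follow give the cmpxchg generators (__CMPXCHG_CASE, __CMPXCHG_DBL) the same treatment. What the #undefs buy in every case is scope: a macro left defined stays live for the rest of any translation unit that pulls in the header, where it can collide with an unrelated definition of the same name or be picked up by code that never meant to use it. A small hypothetical sketch of that failure mode (GEN_OP is an invented name):

    /* In a header: generator left defined past its use site ...        */
    #define GEN_OP(op) static inline void do_##op(void) { }
    GEN_OP(flush)                /* emits do_flush(); no #undef follows  */

    /* ... later, unrelated code reusing the name trips a diagnostic:   */
    #define GEN_OP(x) ((x) * 2)  /* warning: "GEN_OP" redefined          */
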
__CMPXCHG_CASE(mb_, 32)
__CMPXCHG_CASE(mb_, 64)
+#undef __CMPXCHG_CASE
+
#define __CMPXCHG_DBL(name) \
static inline long __cmpxchg_double##name(unsigned long old1, \
unsigned long old2, \
__CMPXCHG_DBL( )
__CMPXCHG_DBL(_mb)
+#undef __CMPXCHG_DBL
+
#define __CMPXCHG_GEN(sfx) \
static inline unsigned long __cmpxchg##sfx(volatile void *ptr, \
unsigned long old, \