atomic: cleanup asm-generic atomic*.h inclusion
author Arun Sharma <asharma@fb.com>
Tue, 26 Jul 2011 23:09:08 +0000 (16:09 -0700)
committer Linus Torvalds <torvalds@linux-foundation.org>
Tue, 26 Jul 2011 23:49:47 +0000 (16:49 -0700)
After changing all consumers of atomics to include <linux/atomic.h>, we
ran into some compile-time errors due to this dependency chain:

linux/atomic.h
  -> asm/atomic.h
    -> asm-generic/atomic-long.h

where atomic-long.h could use functions defined later in linux/atomic.h
without a prototype.  This patch moves the code that includes
asm-generic/atomic*.h into linux/atomic.h.
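
To make the ordering concrete, here is a reduced, userspace-only sketch
(illustrative only, not part of the patch; atomic_t, atomic_long_t and
atomic_add_unless() below are simplified stand-ins for the kernel
definitions):

#include <stdio.h>

typedef struct { int counter; } atomic_t;
/* as on 32-bit, where asm-generic/atomic-long.h wraps atomic_t */
typedef atomic_t atomic_long_t;

/*
 * Previously, asm/atomic.h pulled in asm-generic/atomic-long.h, whose
 * wrappers could call helpers that linux/atomic.h only defines further
 * down, so those calls had no prototype.  With this patch,
 * linux/atomic.h defines its helpers first ...
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	if (v->counter == u)
		return 0;
	v->counter += a;
	return 1;
}

/*
 * ... and only afterwards includes asm-generic/atomic-long.h, so a
 * wrapper like this one always sees a prior definition of what it calls.
 */
static inline int atomic_long_add_unless(atomic_long_t *l, long a, long u)
{
	return atomic_add_unless(l, (int)a, (int)u);
}

int main(void)
{
	atomic_long_t l = { .counter = 3 };

	printf("%d %d\n", atomic_long_add_unless(&l, 1, 5), l.counter);
	return 0;
}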

Archs that need <asm-generic/atomic64.h> need to select
CONFIG_GENERIC_ATOMIC64 from now on (some of them used to include it
unconditionally).

Compile tested on i386 and x86_64 with allnoconfig.

Signed-off-by: Arun Sharma <asharma@fb.com>
Cc: Eric Dumazet <eric.dumazet@gmail.com>
Cc: Ingo Molnar <mingo@elte.hu>
Cc: David Miller <davem@davemloft.net>
Acked-by: Mike Frysinger <vapier@gentoo.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
23 files changed:
arch/alpha/include/asm/atomic.h
arch/arm/include/asm/atomic.h
arch/avr32/include/asm/atomic.h
arch/blackfin/include/asm/atomic.h
arch/cris/include/asm/atomic.h
arch/frv/include/asm/atomic.h
arch/h8300/include/asm/atomic.h
arch/ia64/include/asm/atomic.h
arch/m32r/include/asm/atomic.h
arch/m68k/include/asm/atomic.h
arch/mips/include/asm/atomic.h
arch/mn10300/include/asm/atomic.h
arch/parisc/include/asm/atomic.h
arch/powerpc/include/asm/atomic.h
arch/s390/include/asm/atomic.h
arch/sh/include/asm/atomic.h
arch/sparc/include/asm/atomic_32.h
arch/sparc/include/asm/atomic_64.h
arch/tile/include/asm/atomic.h
arch/x86/include/asm/atomic.h
arch/xtensa/include/asm/atomic.h
include/asm-generic/atomic.h
include/linux/atomic.h

diff --git a/arch/alpha/include/asm/atomic.h b/arch/alpha/include/asm/atomic.h
index 3d67049..640f909 100644
@@ -255,5 +255,4 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 #define smp_mb__before_atomic_inc()    smp_mb()
 #define smp_mb__after_atomic_inc()     smp_mb()
 
-#include <asm-generic/atomic-long.h>
 #endif /* _ALPHA_ATOMIC_H */
diff --git a/arch/arm/include/asm/atomic.h b/arch/arm/include/asm/atomic.h
index 3757e91..86976d0 100644
@@ -459,9 +459,6 @@ static inline int atomic64_add_unless(atomic64_t *v, u64 a, u64 u)
 #define atomic64_dec_and_test(v)       (atomic64_dec_return((v)) == 0)
 #define atomic64_inc_not_zero(v)       atomic64_add_unless((v), 1LL, 0LL)
 
-#else /* !CONFIG_GENERIC_ATOMIC64 */
-#include <asm-generic/atomic64.h>
-#endif
-#include <asm-generic/atomic-long.h>
+#endif /* !CONFIG_GENERIC_ATOMIC64 */
 #endif
 #endif
diff --git a/arch/avr32/include/asm/atomic.h b/arch/avr32/include/asm/atomic.h
index dc6c3a4..e0ac263 100644
@@ -188,6 +188,4 @@ static inline int atomic_sub_if_positive(int i, atomic_t *v)
 #define smp_mb__before_atomic_inc()    barrier()
 #define smp_mb__after_atomic_inc()     barrier()
 
-#include <asm-generic/atomic-long.h>
-
 #endif /*  __ASM_AVR32_ATOMIC_H */
diff --git a/arch/blackfin/include/asm/atomic.h b/arch/blackfin/include/asm/atomic.h
index 292c86f..1352256 100644
@@ -111,10 +111,7 @@ static inline void atomic_set_mask(int mask, atomic_t *v)
 #define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
 #define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
 
-#include <asm-generic/atomic-long.h>
 
 #endif
 
-#include <asm-generic/atomic64.h>
-
 #endif
diff --git a/arch/cris/include/asm/atomic.h b/arch/cris/include/asm/atomic.h
index 7e90532..bbf0938 100644
@@ -157,5 +157,4 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 #define smp_mb__before_atomic_inc()    barrier()
 #define smp_mb__after_atomic_inc()     barrier()
 
-#include <asm-generic/atomic-long.h>
 #endif
diff --git a/arch/frv/include/asm/atomic.h b/arch/frv/include/asm/atomic.h
index a51dcdf..0d8a7d6 100644
@@ -257,5 +257,4 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 }
 
 
-#include <asm-generic/atomic-long.h>
 #endif /* _ASM_ATOMIC_H */
diff --git a/arch/h8300/include/asm/atomic.h b/arch/h8300/include/asm/atomic.h
index e6d1663..f5a38c1 100644
@@ -145,5 +145,4 @@ static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
 #define smp_mb__before_atomic_inc()    barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
-#include <asm-generic/atomic-long.h>
 #endif /* __ARCH_H8300_ATOMIC __ */
diff --git a/arch/ia64/include/asm/atomic.h b/arch/ia64/include/asm/atomic.h
index 22aca21..3fad89e 100644
@@ -215,5 +215,4 @@ atomic64_add_negative (__s64 i, atomic64_t *v)
 #define smp_mb__before_atomic_inc()    barrier()
 #define smp_mb__after_atomic_inc()     barrier()
 
-#include <asm-generic/atomic-long.h>
 #endif /* _ASM_IA64_ATOMIC_H */
diff --git a/arch/m32r/include/asm/atomic.h b/arch/m32r/include/asm/atomic.h
index c839426..1e7f29f 100644
@@ -313,5 +313,4 @@ static __inline__ void atomic_set_mask(unsigned long  mask, atomic_t *addr)
 #define smp_mb__before_atomic_inc()    barrier()
 #define smp_mb__after_atomic_inc()     barrier()
 
-#include <asm-generic/atomic-long.h>
 #endif /* _ASM_M32R_ATOMIC_H */
diff --git a/arch/m68k/include/asm/atomic.h b/arch/m68k/include/asm/atomic.h
index 2269350..65c6be6 100644
@@ -205,6 +205,4 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 #define smp_mb__before_atomic_inc()    barrier()
 #define smp_mb__after_atomic_inc()     barrier()
 
-#include <asm-generic/atomic-long.h>
-#include <asm-generic/atomic64.h>
 #endif /* __ARCH_M68K_ATOMIC __ */
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index 31cb23d..1d93f81 100644
@@ -765,10 +765,6 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  */
 #define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
 
-#else /* !CONFIG_64BIT */
-
-#include <asm-generic/atomic64.h>
-
 #endif /* CONFIG_64BIT */
 
 /*
@@ -780,6 +776,4 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 #define smp_mb__before_atomic_inc()    smp_mb__before_llsc()
 #define smp_mb__after_atomic_inc()     smp_llsc_mb()
 
-#include <asm-generic/atomic-long.h>
-
 #endif /* _ASM_ATOMIC_H */
diff --git a/arch/mn10300/include/asm/atomic.h b/arch/mn10300/include/asm/atomic.h
index a2e6759..b9a8f84 100644
@@ -343,8 +343,6 @@ static inline void atomic_set_mask(unsigned long mask, unsigned long *addr)
 #define smp_mb__before_atomic_inc()    barrier()
 #define smp_mb__after_atomic_inc()     barrier()
 
-#include <asm-generic/atomic-long.h>
-
 #endif /* __KERNEL__ */
 #endif /* CONFIG_SMP */
 #endif /* _ASM_ATOMIC_H */
diff --git a/arch/parisc/include/asm/atomic.h b/arch/parisc/include/asm/atomic.h
index 1914f17..b1dc71f 100644
@@ -335,12 +335,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 
-#else /* CONFIG_64BIT */
-
-#include <asm-generic/atomic64.h>
-
 #endif /* !CONFIG_64BIT */
 
-#include <asm-generic/atomic-long.h>
 
 #endif /* _ASM_PARISC_ATOMIC_H_ */
diff --git a/arch/powerpc/include/asm/atomic.h b/arch/powerpc/include/asm/atomic.h
index 952e161..e2a4c26 100644
@@ -469,11 +469,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 
-#else  /* __powerpc64__ */
-#include <asm-generic/atomic64.h>
-
 #endif /* __powerpc64__ */
 
-#include <asm-generic/atomic-long.h>
 #endif /* __KERNEL__ */
 #endif /* _ASM_POWERPC_ATOMIC_H_ */
diff --git a/arch/s390/include/asm/atomic.h b/arch/s390/include/asm/atomic.h
index 7b0b0a7..8517d2a 100644
@@ -331,6 +331,4 @@ static inline long long atomic64_dec_if_positive(atomic64_t *v)
 #define smp_mb__before_atomic_inc()    smp_mb()
 #define smp_mb__after_atomic_inc()     smp_mb()
 
-#include <asm-generic/atomic-long.h>
-
 #endif /* __ARCH_S390_ATOMIC__  */
diff --git a/arch/sh/include/asm/atomic.h b/arch/sh/include/asm/atomic.h
index 2177596..63a27db 100644
@@ -67,7 +67,4 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 #define smp_mb__before_atomic_inc()    smp_mb()
 #define smp_mb__after_atomic_inc()     smp_mb()
 
-#include <asm-generic/atomic-long.h>
-#include <asm-generic/atomic64.h>
-
 #endif /* __ASM_SH_ATOMIC_H */
diff --git a/arch/sparc/include/asm/atomic_32.h b/arch/sparc/include/asm/atomic_32.h
index bdce95e..5c3c8b6 100644
@@ -160,5 +160,4 @@ static inline int __atomic24_sub(int i, atomic24_t *v)
 
 #endif /* !(__KERNEL__) */
 
-#include <asm-generic/atomic-long.h>
 #endif /* !(__ARCH_SPARC_ATOMIC__) */
diff --git a/arch/sparc/include/asm/atomic_64.h b/arch/sparc/include/asm/atomic_64.h
index 1fc3d0a..9f421df 100644
@@ -113,5 +113,4 @@ static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
 #define smp_mb__before_atomic_inc()    barrier()
 #define smp_mb__after_atomic_inc()     barrier()
 
-#include <asm-generic/atomic-long.h>
 #endif /* !(__ARCH_SPARC64_ATOMIC__) */
diff --git a/arch/tile/include/asm/atomic.h b/arch/tile/include/asm/atomic.h
index e327271..921dbeb 100644
@@ -177,9 +177,4 @@ extern unsigned long __cmpxchg_called_with_bad_pointer(void);
 #include <asm/atomic_64.h>
 #endif
 
-/* Provide the appropriate atomic_long_t definitions. */
-#ifndef __ASSEMBLY__
-#include <asm-generic/atomic-long.h>
-#endif
-
 #endif /* _ASM_TILE_ATOMIC_H */
diff --git a/arch/x86/include/asm/atomic.h b/arch/x86/include/asm/atomic.h
index 5fe9cb3..10572e3 100644
@@ -318,5 +318,4 @@ static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
 # include "atomic64_64.h"
 #endif
 
-#include <asm-generic/atomic-long.h>
 #endif /* _ASM_X86_ATOMIC_H */
diff --git a/arch/xtensa/include/asm/atomic.h b/arch/xtensa/include/asm/atomic.h
index e464212..23592ef 100644
@@ -291,7 +291,6 @@ static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
 #define smp_mb__before_atomic_inc()    barrier()
 #define smp_mb__after_atomic_inc()     barrier()
 
-#include <asm-generic/atomic-long.h>
 #endif /* __KERNEL__ */
 
 #endif /* _XTENSA_ATOMIC_H */
diff --git a/include/asm-generic/atomic.h b/include/asm-generic/atomic.h
index bd18bfd..a8fad94 100644
@@ -154,7 +154,5 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 #define smp_mb__before_atomic_inc()    barrier()
 #define smp_mb__after_atomic_inc()     barrier()
 
-#include <asm-generic/atomic-long.h>
-
 #endif /* __KERNEL__ */
 #endif /* __ASM_GENERIC_ATOMIC_H */
diff --git a/include/linux/atomic.h b/include/linux/atomic.h
index d5e167a..42e7f63 100644
@@ -96,4 +96,8 @@ static inline void atomic_or(int i, atomic_t *v)
 }
 #endif /* #ifndef CONFIG_ARCH_HAS_ATOMIC_OR */
 
+#include <asm-generic/atomic-long.h>
+#ifdef CONFIG_GENERIC_ATOMIC64
+#include <asm-generic/atomic64.h>
+#endif
 #endif /* _LINUX_ATOMIC_H */