Use built-in atomics for load/store/CAS for Clang by default (Aarch64)
author: Ivan Maidanski <ivmai@mail.ru>
Tue, 5 Dec 2017 09:03:51 +0000 (12:03 +0300)
committer: Ivan Maidanski <ivmai@mail.ru>
Fri, 22 Dec 2017 07:44:15 +0000 (10:44 +0300)
* src/atomic_ops/sysdeps/gcc/aarch64.h [!AO_PREFER_BUILTIN_ATOMICS]
(AO_double_load, AO_double_load_acquire, AO_double_store,
AO_double_store_release, AO_double_compare_and_swap,
AO_double_compare_and_swap_acquire, AO_double_compare_and_swap_release,
AO_double_compare_and_swap_full): Do not define (using asm) if
AO_THREAD_SANITIZER or if Clang (unless AO_AARCH64_ASM_LOAD_STORE_CAS).

src/atomic_ops/sysdeps/gcc/aarch64.h

index f430007..90e9e06 100644 (file)
 # define AO_SKIPATOMIC_double_load_acquire
 #endif
 
-#if !defined(AO_PREFER_BUILTIN_ATOMICS)
+/* As of gcc-5.4, all built-in load/store and CAS atomics for double    */
+/* word require -latomic (and are not lock-free), so we use the         */
+/* asm-based implementation by default.                                 */
+/* TODO: Update it when GCC has lock-free double-word load/store/CAS.   */
+#if !defined(AO_PREFER_BUILTIN_ATOMICS) && !defined(AO_THREAD_SANITIZER) \
+    && (!defined(__clang__) || defined(AO_AARCH64_ASM_LOAD_STORE_CAS))
 
   AO_INLINE AO_double_t
   AO_double_load(const volatile AO_double_t *addr)
@@ -76,9 +81,6 @@
   }
 # define AO_HAVE_double_load_acquire
 
-  /* As of gcc 5.0, all built-in store and CAS atomics for double       */
-  /* word require -latomic, so use asm-based implementation by default. */
-
   AO_INLINE void
   AO_double_store(volatile AO_double_t *addr, AO_double_t value)
   {
   }
 # define AO_HAVE_double_compare_and_swap_full
 
-#endif /* !AO_PREFER_BUILTIN_ATOMICS */
+#endif /* !AO_PREFER_BUILTIN_ATOMICS && !__clang__ */
 
 /* As of clang-5.0, __GCC_HAVE_SYNC_COMPARE_AND_SWAP_16                 */
 /* macro is still missing (while the double-word CAS is available).     */