2006-08-30 Benjamin Kosnik <bkoz@redhat.com>
author: bkoz <bkoz@138bc75d-0d04-0410-961f-82ee72b054a4>
Wed, 30 Aug 2006 09:39:45 +0000 (09:39 +0000)
committer: bkoz <bkoz@138bc75d-0d04-0410-961f-82ee72b054a4>
Wed, 30 Aug 2006 09:39:45 +0000 (09:39 +0000)
            Richard Guenther  <rguenther@suse.de>

* config/abi/pre/gnu.ver: Spell out exact signatures for atomic
access functions.

* include/bits/atomicity.h (__atomic_add_dispatch): Remove
volatile qualification for _Atomic_word argument.
(__atomic_add_single): Same.
(__exchange_and_add_dispatch): Same.
(__exchange_and_add_single): Same.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@116584 138bc75d-0d04-0410-961f-82ee72b054a4

libstdc++-v3/ChangeLog
libstdc++-v3/config/abi/pre/gnu.ver
libstdc++-v3/include/bits/atomicity.h

index cf8546c..caa1fcf 100644 (file)
@@ -1,3 +1,15 @@
+2006-08-30  Benjamin Kosnik  <bkoz@redhat.com>
+            Richard Guenther  <rguenther@suse.de>
+       
+       * config/abi/pre/gnu.ver: Spell out exact signatures for atomic
+       access functions.
+
+       * include/bits/atomicity.h (__atomic_add_dispatch): Remove
+       volatile qualification for _Atomic_word argument.
+       (__atomic_add_single): Same.
+       (__exchange_and_add_dispatch): Same.
+       (__exchange_and_add_single): Same.
+       
 2006-08-29  Paolo Carlini  <pcarlini@suse.de>
 
        * include/tr1/random (subtract_with_carry_01<>::_M_initialize_npows):
index ee869a2..1a1cfce 100644 (file)
@@ -378,8 +378,8 @@ GLIBCXX_3.4 {
 
     # __gnu_cxx::__atomic_add
     # __gnu_cxx::__exchange_and_add
-    _ZN9__gnu_cxx12__atomic_add*;
-    _ZN9__gnu_cxx18__exchange_and_add*;
+    _ZN9__gnu_cxx12__atomic_addEPVii;
+    _ZN9__gnu_cxx18__exchange_and_addEPVii;
 
     # debug mode
     _ZN10__gnu_norm15_List_node_base4hook*;
index 18832fe..79af99e 100644 (file)
@@ -60,7 +60,7 @@ _GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)
 #endif
 
   static inline _Atomic_word
-  __exchange_and_add_single(volatile _Atomic_word* __mem, int __val)
+  __exchange_and_add_single(_Atomic_word* __mem, int __val)
   {
     _Atomic_word __result = *__mem;
     *__mem += __val;
@@ -68,12 +68,12 @@ _GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)
   }
 
   static inline void
-  __atomic_add_single(volatile _Atomic_word* __mem, int __val)
+  __atomic_add_single(_Atomic_word* __mem, int __val)
   { *__mem += __val; }
 
   static inline _Atomic_word
   __attribute__ ((__unused__))
-  __exchange_and_add_dispatch(volatile _Atomic_word* __mem, int __val)
+  __exchange_and_add_dispatch(_Atomic_word* __mem, int __val)
   {
 #ifdef __GTHREADS
     if (__gthread_active_p())
@@ -87,7 +87,7 @@ _GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)
 
   static inline void
   __attribute__ ((__unused__))
-  __atomic_add_dispatch(volatile _Atomic_word* __mem, int __val)
+  __atomic_add_dispatch(_Atomic_word* __mem, int __val)
   {
 #ifdef __GTHREADS
     if (__gthread_active_p())
@@ -101,8 +101,9 @@ _GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)
 
 _GLIBCXX_END_NAMESPACE
 
-// Even if the CPU doesn't need a memory barrier, we need to ensure that
-// the compiler doesn't reorder memory accesses across the barriers.
+// Even if the CPU doesn't need a memory barrier, we need to ensure
+// that the compiler doesn't reorder memory accesses across the
+// barriers.
 #ifndef _GLIBCXX_READ_MEM_BARRIER
 #define _GLIBCXX_READ_MEM_BARRIER __asm __volatile ("":::"memory")
 #endif