1 /* GLIB - Library of useful routines for C programming
2 * Copyright (C) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald
4 * g_atomic_*: atomic operations.
5 * Copyright (C) 2003 Sebastian Wilhelmi
6 * Copyright (C) 2007 Nokia Corporation
8 * This library is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Lesser General Public
10 * License as published by the Free Software Foundation; either
11 * version 2 of the License, or (at your option) any later version.
13 * This library is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Lesser General Public License for more details.
18 * You should have received a copy of the GNU Lesser General Public
19 * License along with this library; if not, write to the
20 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
21 * Boston, MA 02111-1307, USA.
26 #if defined (G_ATOMIC_ARM)
31 #include "gthreadprivate.h"
/* NOTE(review): this file looks like a line-numbered, partial excerpt of
 * GLib's gatomic.c -- each line starts with its original line number and
 * many intermediate lines (bodies, braces, parameters) are missing.
 * Section below: GCC inline-asm atomics for i486+. "lock; xaddl" is an
 * atomic exchange-and-add; "lock; cmpxchgl" is compare-and-swap with the
 * expected value in EAX ("=a"/"0" constraints). */
33 #if defined (__GNUC__)
34 # if defined (G_ATOMIC_I486)
35 /* Adapted from CVS version 1.10 of glibc's sysdeps/i386/i486/bits/atomic.h
38 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
43 __asm__ __volatile__ ("lock; xaddl %0,%1"
44 : "=r" (result), "=m" (*atomic)
45 : "0" (val), "m" (*atomic));
50 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
53 __asm__ __volatile__ ("lock; addl %1,%0"
55 : "ir" (val), "m" (*atomic));
59 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
65 __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
66 : "=a" (result), "=m" (*atomic)
67 : "r" (newval), "m" (*atomic), "0" (oldval));
69 return result == oldval;
72 /* The same code as above, as on i386 gpointer is 32 bit as well.
73 * Duplicating the code here seems more natural than casting the
74 * arguments and calling the former function */
/* Pointer CAS: identical cmpxchgl sequence, valid because gpointer is
 * 32 bits on i386 (per the comment above). Returns TRUE iff the value
 * read back in EAX equals oldval, i.e. the swap happened. */
77 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
83 __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
84 : "=a" (result), "=m" (*atomic)
85 : "r" (newval), "m" (*atomic), "0" (oldval));
87 return result == oldval;
/* SPARCv9 section (fragmentary excerpt -- macro body lines are missing).
 * Uses the "cas" (32-bit) / "casx" (64-bit) compare-and-swap
 * instructions, selected by pointer size. */
90 # elif defined (G_ATOMIC_SPARCV9)
91 /* Adapted from CVS version 1.3 of glibc's sysdeps/sparc/sparc64/bits/atomic.h
93 # define ATOMIC_INT_CMP_XCHG(atomic, oldval, newval) \
96 __asm__ __volatile__ ("cas [%4], %2, %0" \
97 : "=r" (__result), "=m" (*(atomic)) \
98 : "r" (oldval), "m" (*(atomic)), "r" (atomic),\
100 __result == oldval; \
103 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
/* 32-bit pointer CAS: same "cas" sequence as the int macro above;
 * succeeds iff the value loaded back equals oldval. */
105 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
110 __asm__ __volatile__ ("cas [%4], %2, %0"
111 : "=r" (result), "=m" (*atomic)
112 : "r" (oldval), "m" (*atomic), "r" (atomic),
114 return result == oldval;
116 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
/* 64-bit pointer CAS uses "casx"; 'a' aliases the volatile pointer so
 * the "m" constraints refer to a plain gpointer* lvalue. */
118 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
123 gpointer *a = atomic;
124 __asm__ __volatile__ ("casx [%4], %2, %0"
125 : "=r" (result), "=m" (*a)
126 : "r" (oldval), "m" (*a), "r" (a),
128 return result == oldval;
130 # else /* What's that */
131 # error "Your system has an unsupported pointer size"
132 # endif /* GLIB_SIZEOF_VOID_P */
/* Full SPARC memory barrier: orders all four load/store combinations. */
133 # define G_ATOMIC_MEMORY_BARRIER \
134 __asm__ __volatile__ ("membar #LoadLoad | #LoadStore" \
135 " | #StoreLoad | #StoreStore" : : : "memory")
/* Alpha section -- heavily truncated in this excerpt: only scattered
 * lines of the ll/sc-style CAS sequences survive (note the jump from
 * original line 147 to 162, and 170 to 188). Do not attempt to infer
 * the full asm from what is visible here. */
137 # elif defined (G_ATOMIC_ALPHA)
138 /* Adapted from CVS version 1.3 of glibc's sysdeps/alpha/bits/atomic.h
140 # define ATOMIC_INT_CMP_XCHG(atomic, oldval, newval) \
144 __asm__ __volatile__ ( \
147 " cmpeq %0,%3,%1\n" \
162 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
/* 32-bit pointer CAS (asm body missing from this excerpt). */
164 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
170 __asm__ __volatile__ (
188 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
/* 64-bit pointer CAS (asm body missing from this excerpt). */
190 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
196 __asm__ __volatile__ (
214 # else /* What's that */
215 # error "Your system has an unsupported pointer size"
216 # endif /* GLIB_SIZEOF_VOID_P */
/* Alpha full memory barrier instruction. */
217 # define G_ATOMIC_MEMORY_BARRIER __asm__ ("mb" : : : "memory")
/* x86-64 section: same lock-prefixed xaddl/addl/cmpxchgl sequences as
 * the i486 branch for 32-bit ints, plus "cmpxchgq" (with the %q2
 * 64-bit register modifier) for the pointer CAS. Fragmentary excerpt:
 * return types, second parameters and braces are missing, and the
 * glibc-attribution comment opened below is never closed in this view. */
218 # elif defined (G_ATOMIC_X86_64)
219 /* Adapted from CVS version 1.9 of glibc's sysdeps/x86_64/bits/atomic.h
222 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
227 __asm__ __volatile__ ("lock; xaddl %0,%1"
228 : "=r" (result), "=m" (*atomic)
229 : "0" (val), "m" (*atomic));
234 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
237 __asm__ __volatile__ ("lock; addl %1,%0"
239 : "ir" (val), "m" (*atomic));
243 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
249 __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
250 : "=a" (result), "=m" (*atomic)
251 : "r" (newval), "m" (*atomic), "0" (oldval));
253 return result == oldval;
257 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
263 __asm__ __volatile__ ("lock; cmpxchgq %q2, %1"
264 : "=a" (result), "=m" (*atomic)
265 : "r" (newval), "m" (*atomic), "0" (oldval));
267 return result == oldval;
/* PowerPC section: CAS built from lwarx/stwcx (32-bit) or ldarx (64-bit)
 * load-reserve/store-conditional loops, each prefixed with "sync".
 * ASM_NUMERIC_LABELS selects numeric ("1:") vs. local symbolic
 * (".Lxxx%=:") asm labels. Fragmentary excerpt: loop bodies, #else
 * branches and closing braces are missing throughout. */
270 # elif defined (G_ATOMIC_POWERPC)
271 /* Adapted from CVS version 1.16 of glibc's sysdeps/powerpc/bits/atomic.h
272 * and CVS version 1.4 of glibc's sysdeps/powerpc/powerpc32/bits/atomic.h
273 * and CVS version 1.7 of glibc's sysdeps/powerpc/powerpc64/bits/atomic.h
276 /* Non-optimizing compile bails on the following two asm statements
277 * for reasons unknown to the author */
/* exchange-and-add via an lwarx/stwcx retry loop (asm body truncated). */
279 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
283 #if ASM_NUMERIC_LABELS
284 __asm__ __volatile__ ("1: lwarx %0,0,%3\n"
288 : "=&b" (result), "=&r" (temp), "=m" (*atomic)
289 : "b" (atomic), "r" (val), "m" (*atomic)
292 __asm__ __volatile__ (".Lieaa%=: lwarx %0,0,%3\n"
296 : "=&b" (result), "=&r" (temp), "=m" (*atomic)
297 : "b" (atomic), "r" (val), "m" (*atomic)
303 /* The same as above, to save a function call repeated here */
/* add-only variant of the same retry loop. */
305 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
309 #if ASM_NUMERIC_LABELS
310 __asm__ __volatile__ ("1: lwarx %0,0,%3\n"
314 : "=&b" (result), "=&r" (temp), "=m" (*atomic)
315 : "b" (atomic), "r" (val), "m" (*atomic)
318 __asm__ __volatile__ (".Lia%=: lwarx %0,0,%3\n"
322 : "=&b" (result), "=&r" (temp), "=m" (*atomic)
323 : "b" (atomic), "r" (val), "m" (*atomic)
327 # else /* !__OPTIMIZE__ */
/* Unoptimized builds fall back to CAS retry loops in plain C (per the
 * "Non-optimizing compile bails" comment above). */
329 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
335 while (!g_atomic_int_compare_and_exchange (atomic, result, result + val));
341 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
347 while (!g_atomic_int_compare_and_exchange (atomic, result, result + val));
349 # endif /* !__OPTIMIZE__ */
351 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
/* 32-bit int CAS: sync + lwarx/stwcx loop (body truncated). */
353 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
358 #if ASM_NUMERIC_LABELS
359 __asm__ __volatile__ ("sync\n"
367 : "b" (atomic), "r" (oldval), "r" (newval)
370 __asm__ __volatile__ ("sync\n"
371 ".L1icae%=: lwarx %0,0,%1\n"
378 : "b" (atomic), "r" (oldval), "r" (newval)
/* 32-bit pointer CAS: same word-sized sequence (gpointer is 32-bit here). */
385 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
390 #if ASM_NUMERIC_LABELS
391 __asm__ __volatile__ ("sync\n"
399 : "b" (atomic), "r" (oldval), "r" (newval)
402 __asm__ __volatile__ ("sync\n"
403 ".L1pcae%=: lwarx %0,0,%1\n"
410 : "b" (atomic), "r" (oldval), "r" (newval)
415 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
/* 64-bit branch: int CAS still uses lwarx (gint stays 32-bit)... */
417 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
422 #if ASM_NUMERIC_LABELS
423 __asm__ __volatile__ ("sync\n"
432 : "b" (atomic), "r" (oldval), "r" (newval)
435 __asm__ __volatile__ ("sync\n"
436 ".L1icae%=: lwarx %0,0,%1\n"
444 : "b" (atomic), "r" (oldval), "r" (newval)
/* ...while the pointer CAS switches to ldarx for 64-bit loads. */
451 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
456 #if ASM_NUMERIC_LABELS
457 __asm__ __volatile__ ("sync\n"
465 : "b" (atomic), "r" (oldval), "r" (newval)
468 __asm__ __volatile__ ("sync\n"
469 ".L1pcae%=: ldarx %0,0,%1\n"
476 : "b" (atomic), "r" (oldval), "r" (newval)
481 # else /* What's that */
482 # error "Your system has an unsupported pointer size"
483 # endif /* GLIB_SIZEOF_VOID_P */
/* Full barrier on PowerPC is the "sync" instruction. */
485 # define G_ATOMIC_MEMORY_BARRIER __asm__ ("sync" : : : "memory")
/* IA64 section: delegates entirely to the GCC __sync_* builtins --
 * __sync_fetch_and_add (returns the PREVIOUS value, matching the
 * exchange_and_add contract), __sync_bool_compare_and_swap for both
 * int and pointer CAS (pointers go through a long* cast), and
 * __sync_synchronize as the full barrier. Fragmentary excerpt: second
 * parameters and braces are missing. */
487 # elif defined (G_ATOMIC_IA64)
488 /* Adapted from CVS version 1.8 of glibc's sysdeps/ia64/bits/atomic.h
491 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
494 return __sync_fetch_and_add (atomic, val);
498 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
501 __sync_fetch_and_add (atomic, val);
505 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
509 return __sync_bool_compare_and_swap (atomic, oldval, newval);
513 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
517 return __sync_bool_compare_and_swap ((long *)atomic,
518 (long)oldval, (long)newval);
521 # define G_ATOMIC_MEMORY_BARRIER __sync_synchronize ()
/* S390 section: CAS via the "cs" (compare-and-swap, 32-bit) and "csg"
 * (64-bit) instructions. "+d" makes __result/result both the expected
 * input and the loaded-back output; success means it still equals
 * oldval afterwards. Condition code is clobbered ("cc"). */
522 # elif defined (G_ATOMIC_S390)
523 /* Adapted from glibc's sysdeps/s390/bits/atomic.h
525 # define ATOMIC_INT_CMP_XCHG(atomic, oldval, newval) \
527 gint __result = oldval; \
528 __asm__ __volatile__ ("cs %0, %2, %1" \
529 : "+d" (__result), "=Q" (*(atomic)) \
530 : "d" (newval), "m" (*(atomic)) : "cc" ); \
531 __result == oldval; \
534 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
/* 32-bit pointer CAS: same "cs" sequence, operating on gpointer. */
536 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
540 gpointer result = oldval;
541 __asm__ __volatile__ ("cs %0, %2, %1"
542 : "+d" (result), "=Q" (*(atomic))
543 : "d" (newval), "m" (*(atomic)) : "cc" );
544 return result == oldval;
546 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
/* 64-bit pointer CAS uses "csg"; 'a' drops volatile for the memory
 * operands and newval is widened through (long). */
548 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
552 gpointer result = oldval;
553 gpointer *a = atomic;
554 __asm__ __volatile__ ("csg %0, %2, %1"
555 : "+d" (result), "=Q" (*a)
556 : "d" ((long)(newval)), "m" (*a) : "cc" );
557 return result == oldval;
559 # else /* What's that */
560 # error "Your system has an unsupported pointer size"
561 # endif /* GLIB_SIZEOF_VOID_P */
/* ARM section: no suitable CAS instruction is used here; instead all
 * atomic ops serialize on a single global spinlock ('atomic_spin').
 * Fragmentary excerpt: the trylock asm, loop bodies, and most function
 * bodies are missing. */
562 # elif defined (G_ATOMIC_ARM)
/* Global spinlock word shared by every atomic operation on ARM. */
563 static volatile int atomic_spin = 0;
/* Attempts one lock acquisition (asm body missing in this excerpt). */
565 static int atomic_spin_trylock (void)
572 : "r,0" (1), "r,r" (&atomic_spin)
/* Spins until atomic_spin_trylock succeeds. */
580 static void atomic_spin_lock (void)
582 while (atomic_spin_trylock())
586 static void atomic_spin_unlock (void)
/* Each op below takes the spinlock, mutates/compares *atomic, then
 * releases the lock (bodies truncated to the unlock calls). */
592 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
600 atomic_spin_unlock();
606 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
611 atomic_spin_unlock();
615 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
622 if (*atomic == oldval)
629 atomic_spin_unlock();
635 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
642 if (*atomic == oldval)
649 atomic_spin_unlock();
/* CRIS / CRISv32 section: CAS is a cmp.d / conditional move.d sequence
 * (two macro variants, one per sub-architecture). CRIS cannot do this
 * atomically when the operand straddles a 32-byte cache line, so each
 * public op first tests CRIS_ATOMIC_BREAKS_CACHELINE and punts to a
 * mutex-protected __g_atomic_* twin for that rare case. */
653 # elif defined (G_ATOMIC_CRIS) || defined (G_ATOMIC_CRISV32)
654 # ifdef G_ATOMIC_CRIS
/* CRIS (v10) variant of the inline CAS. */
655 # define CRIS_ATOMIC_INT_CMP_XCHG(atomic, oldval, newval) \
658 __asm__ __volatile__ ("\n" \
660 "cmp.d [%[Atomic]], %[OldVal]\n\t" \
663 "move.d %[NewVal], [%[Atomic]]\n\t" \
665 "1:\tseq %[Result]" \
666 : [Result] "=&r" (__result), \
668 : [Atomic] "r" (atomic), \
669 [OldVal] "r" (oldval), \
670 [NewVal] "r" (newval), \
671 "g" (*(gpointer*) (atomic)) \
676 # define CRIS_ATOMIC_INT_CMP_XCHG(atomic, oldval, newval) \
679 __asm__ __volatile__ ("\n" \
681 "cmp.d [%[Atomic]], %[OldVal]\n\t" \
684 "move.d %[NewVal], [%[Atomic]]\n\t" \
686 "1:\tseq %[Result]" \
687 : [Result] "=&r" (__result), \
689 : [Atomic] "r" (atomic), \
690 [OldVal] "r" (oldval), \
691 [NewVal] "r" (newval), \
692 "g" (*(gpointer*) (atomic)) \
698 #define CRIS_CACHELINE_SIZE 32
/* TRUE when the operand's offset within its cache line leaves fewer
 * than sizeof(atomic) bytes before the line boundary. */
699 #define CRIS_ATOMIC_BREAKS_CACHELINE(atomic) \
700 (((gulong)(atomic) & (CRIS_CACHELINE_SIZE - 1)) > (CRIS_CACHELINE_SIZE - sizeof (atomic)))
/* Forward declarations of the mutex-based fallbacks defined further
 * down (via the DEFINE_WITH_MUTEXES + #define renaming below). */
702 gint __g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
704 void __g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
706 gboolean __g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
709 gboolean __g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
/* Fast path: inline CAS; slow path: mutex fallback on a line break. */
714 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
718 if (G_UNLIKELY (CRIS_ATOMIC_BREAKS_CACHELINE (atomic)))
719 return __g_atomic_pointer_compare_and_exchange (atomic, oldval, newval);
721 return CRIS_ATOMIC_INT_CMP_XCHG (atomic, oldval, newval);
725 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
729 if (G_UNLIKELY (CRIS_ATOMIC_BREAKS_CACHELINE (atomic)))
730 return __g_atomic_int_compare_and_exchange (atomic, oldval, newval);
732 return CRIS_ATOMIC_INT_CMP_XCHG (atomic, oldval, newval);
/* exchange_and_add / add are CAS retry loops over the fast path. */
736 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
741 if (G_UNLIKELY (CRIS_ATOMIC_BREAKS_CACHELINE (atomic)))
742 return __g_atomic_int_exchange_and_add (atomic, val);
746 while (!CRIS_ATOMIC_INT_CMP_XCHG (atomic, result, result + val));
752 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
757 if (G_UNLIKELY (CRIS_ATOMIC_BREAKS_CACHELINE (atomic)))
758 return __g_atomic_int_add (atomic, val);
762 while (!CRIS_ATOMIC_INT_CMP_XCHG (atomic, result, result + val));
/* Renaming trick: DEFINE_WITH_MUTEXES below emits the mutex versions,
 * but under the __g_atomic_* names so they serve only as the
 * cacheline-split fallbacks for the wrappers above. */
765 /* We need the atomic mutex for atomic operations where the atomic variable
766 * breaks the 32 byte cache line since the CRIS architecture does not support
767 * atomic operations on such variables. Fortunately this should be rare.
769 # define DEFINE_WITH_MUTEXES
770 # define g_atomic_int_exchange_and_add __g_atomic_int_exchange_and_add
771 # define g_atomic_int_add __g_atomic_int_add
772 # define g_atomic_int_compare_and_exchange __g_atomic_int_compare_and_exchange
773 # define g_atomic_pointer_compare_and_exchange __g_atomic_pointer_compare_and_exchange
/* Implementation selection fallthrough: a GCC build on an architecture
 * with no asm branch above, or any non-GCC non-Win32 build, gets the
 * mutex-based implementation; non-GCC Win32 gets InterlockedXxx. */
775 # else /* !G_ATOMIC_* */
776 # define DEFINE_WITH_MUTEXES
777 # endif /* G_ATOMIC_* */
778 #else /* !__GNUC__ */
779 # ifdef G_PLATFORM_WIN32
780 # define DEFINE_WITH_WIN32_INTERLOCKED
782 # define DEFINE_WITH_MUTEXES
784 #endif /* __GNUC__ */
/* Win32 section: wraps the InterlockedExchangeAdd /
 * InterlockedCompareExchange(Pointer) APIs. Fragmentary excerpt --
 * the comment opened just below is never closed in this view, and
 * several #else/#endif and body lines are missing. */
786 #ifdef DEFINE_WITH_WIN32_INTERLOCKED
787 # include <windows.h>
788 /* Following indicates that InterlockedCompareExchangePointer is
789 * declared in winbase.h (included by windows.h) and needs to be
790 * commented out if not true. It is defined iff WINVER > 0x0400,
791 * which is usually correct but can be wrong if WINVER is set before
792 * windows.h is included.
795 # define HAVE_INTERLOCKED_COMPARE_EXCHANGE_POINTER
799 g_atomic_int_exchange_and_add (volatile gint32 G_GNUC_MAY_ALIAS *atomic,
802 return InterlockedExchangeAdd (atomic, val);
806 g_atomic_int_add (volatile gint32 G_GNUC_MAY_ALIAS *atomic,
809 InterlockedExchangeAdd (atomic, val);
813 g_atomic_int_compare_and_exchange (volatile gint32 G_GNUC_MAY_ALIAS *atomic,
817 #ifndef HAVE_INTERLOCKED_COMPARE_EXCHANGE_POINTER
818 return (guint32) InterlockedCompareExchange ((PVOID*)atomic,
820 (PVOID)oldval) == oldval;
822 return InterlockedCompareExchange (atomic,
829 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
833 # ifdef HAVE_INTERLOCKED_COMPARE_EXCHANGE_POINTER
834 return InterlockedCompareExchangePointer (atomic, newval, oldval) == oldval;
836 # if GLIB_SIZEOF_VOID_P != 4 /* no 32-bit system */
/* Without the Pointer variant only 32-bit pointers can reuse the
 * 32-bit InterlockedCompareExchange; 64-bit builds must error out. */
837 # error "InterlockedCompareExchangePointer needed"
839 return InterlockedCompareExchange (atomic, newval, oldval) == oldval;
843 #endif /* DEFINE_WITH_WIN32_INTERLOCKED */
/* Portable fallback: every atomic op is performed under one global
 * GMutex. Fragmentary excerpt -- the critical-section bodies between
 * each lock/unlock pair are missing. */
845 #ifdef DEFINE_WITH_MUTEXES
846 /* We have to use the slow, but safe locking method */
/* Single lock serializing all "atomic" operations; created in
 * _g_atomic_thread_init() near the end of the file. */
847 static GMutex *g_atomic_mutex;
/* Each function below brackets its (missing) read-modify-write body
 * with g_mutex_lock/g_mutex_unlock on the global mutex. */
850 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
855 g_mutex_lock (g_atomic_mutex);
858 g_mutex_unlock (g_atomic_mutex);
865 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
868 g_mutex_lock (g_atomic_mutex);
870 g_mutex_unlock (g_atomic_mutex);
874 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
880 g_mutex_lock (g_atomic_mutex);
881 if (*atomic == oldval)
888 g_mutex_unlock (g_atomic_mutex);
894 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
900 g_mutex_lock (g_atomic_mutex);
901 if (*atomic == oldval)
908 g_mutex_unlock (g_atomic_mutex);
/* When plain loads/stores are not ordered strongly enough, even the
 * get/set accessors take the mutex. The (parenthesized) names define
 * the real functions behind same-named convenience macros. */
913 #ifdef G_ATOMIC_OP_MEMORY_BARRIER_NEEDED
915 (g_atomic_int_get) (volatile gint G_GNUC_MAY_ALIAS *atomic)
919 g_mutex_lock (g_atomic_mutex);
921 g_mutex_unlock (g_atomic_mutex);
927 (g_atomic_int_set) (volatile gint G_GNUC_MAY_ALIAS *atomic,
930 g_mutex_lock (g_atomic_mutex);
932 g_mutex_unlock (g_atomic_mutex);
936 (g_atomic_pointer_get) (volatile gpointer G_GNUC_MAY_ALIAS *atomic)
940 g_mutex_lock (g_atomic_mutex);
942 g_mutex_unlock (g_atomic_mutex);
948 (g_atomic_pointer_set) (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
951 g_mutex_lock (g_atomic_mutex);
953 g_mutex_unlock (g_atomic_mutex);
955 #endif /* G_ATOMIC_OP_MEMORY_BARRIER_NEEDED */
/* On architectures with a native G_ATOMIC_MEMORY_BARRIER but weak
 * ordering, get/set are a plain access paired with the barrier
 * (access lines missing from this excerpt). The (parenthesized)
 * names define real functions behind the same-named macros. */
956 #elif defined (G_ATOMIC_OP_MEMORY_BARRIER_NEEDED)
958 (g_atomic_int_get) (volatile gint G_GNUC_MAY_ALIAS *atomic)
960 G_ATOMIC_MEMORY_BARRIER;
965 (g_atomic_int_set) (volatile gint G_GNUC_MAY_ALIAS *atomic,
969 G_ATOMIC_MEMORY_BARRIER;
973 (g_atomic_pointer_get) (volatile gpointer G_GNUC_MAY_ALIAS *atomic)
975 G_ATOMIC_MEMORY_BARRIER;
980 (g_atomic_pointer_set) (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
984 G_ATOMIC_MEMORY_BARRIER;
986 #endif /* DEFINE_WITH_MUTEXES || G_ATOMIC_OP_MEMORY_BARRIER_NEEDED */
/* Architectures that only supplied the ATOMIC_INT_CMP_XCHG macro
 * (SPARC, Alpha, S390) get the remaining int operations generated
 * here: CAS delegates directly, while exchange_and_add and add are
 * classic CAS retry loops (result re-read on each failed attempt --
 * the re-read lines are missing from this excerpt). */
988 #ifdef ATOMIC_INT_CMP_XCHG
990 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
994 return ATOMIC_INT_CMP_XCHG (atomic, oldval, newval);
998 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
1004 while (!ATOMIC_INT_CMP_XCHG (atomic, result, result + val));
1010 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
1016 while (!ATOMIC_INT_CMP_XCHG (atomic, result, result + val));
1018 #endif /* ATOMIC_INT_CMP_XCHG */
/* One-time init hook (called from GLib's thread-system setup, per
 * gthreadprivate.h included above): the mutex-based fallback needs its
 * global lock allocated before any atomic op runs. */
1021 _g_atomic_thread_init (void)
1023 #ifdef DEFINE_WITH_MUTEXES
1024 g_atomic_mutex = g_mutex_new ();
1025 #endif /* DEFINE_WITH_MUTEXES */
/* When no barrier is needed, the out-of-line (parenthesized) get/set
 * functions simply invoke the same-named macros, so code that takes
 * their address still works. */
1028 #ifndef G_ATOMIC_OP_MEMORY_BARRIER_NEEDED
1030 (g_atomic_int_get) (volatile gint G_GNUC_MAY_ALIAS *atomic)
1032 return g_atomic_int_get (atomic);
1036 (g_atomic_int_set) (volatile gint G_GNUC_MAY_ALIAS *atomic,
1039 g_atomic_int_set (atomic, newval);
1043 (g_atomic_pointer_get) (volatile gpointer G_GNUC_MAY_ALIAS *atomic)
1045 return g_atomic_pointer_get (atomic);
1049 (g_atomic_pointer_set) (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
1052 g_atomic_pointer_set (atomic, newval);
1054 #endif /* G_ATOMIC_OP_MEMORY_BARRIER_NEEDED */