/* GLIB - Library of useful routines for C programming
 * Copyright (C) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald
 *
 * g_atomic_*: atomic operations.
 * Copyright (C) 2003 Sebastian Wilhelmi
 * Copyright (C) 2007 Nokia Corporation
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */
#include "config.h"

#if defined (G_ATOMIC_ARM)
#include <sched.h>
#endif

#include "glib.h"
#include "gthreadprivate.h"
#include "galias.h"
/**
 * SECTION:atomic_operations
 * @title: Atomic Operations
 * @short_description: basic atomic integer and pointer operations
 *
 * The following functions can be used to atomically access integers and
 * pointers. They are implemented as inline assembler functions on most
 * platforms and use slower fall-backs otherwise. Using them can sometimes
 * save you from using a performance-expensive #GMutex to protect the
 * integer or pointer.
 *
 * The most important usage is reference counting. Using
 * g_atomic_int_inc() and g_atomic_int_dec_and_test() makes reference
 * counting a very fast operation.
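 *
 * For example, reference counting can be written as follows (an
 * illustrative sketch; MyObject, its ref_count field and
 * my_object_free() are hypothetical):
 * |[
 * MyObject *
 * my_object_ref (MyObject *object)
 * {
 *   g_atomic_int_inc (&object->ref_count);
 *   return object;
 * }
 *
 * void
 * my_object_unref (MyObject *object)
 * {
 *   if (g_atomic_int_dec_and_test (&object->ref_count))
 *     my_object_free (object);  // hypothetical destructor
 * }
 * ]|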
 *
 * <note><para>You must not directly read integers or pointers concurrently
 * accessed by multiple threads, but use the atomic accessor functions
 * instead. That is, always use g_atomic_int_get() and g_atomic_pointer_get()
 * for reads. They provide the necessary synchronization mechanisms
 * like memory barriers to access memory locations concurrently.
 * </para></note>
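 *
 * For instance, a worker loop might poll a stop flag that another thread
 * sets (a minimal sketch; `stop_requested` and do_some_work() are
 * hypothetical):
 * |[
 * static gint stop_requested = 0;
 *
 * while (!g_atomic_int_get (&stop_requested))
 *   do_some_work ();  // hypothetical work item
 * ]|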
 *
 * <note><para>If you are using those functions for anything apart from
 * simple reference counting, you should really be aware of the implications
 * of doing that. There are literally thousands of ways to shoot yourself
 * in the foot. So if in doubt, use a #GMutex. If you don't know what
 * memory barriers are, do not use anything but g_atomic_int_inc() and
 * g_atomic_int_dec_and_test().
 * </para></note>
 *
 * <note><para>It is not safe to set an integer or pointer just by assigning
 * to it, when it is concurrently accessed by other threads with the following
 * functions. Use g_atomic_int_compare_and_exchange() or
 * g_atomic_pointer_compare_and_exchange() respectively, as in the sketch
 * below.</para></note>
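 *
 * A plain store can instead be written as a retry loop (a sketch;
 * `value` is a hypothetical variable):
 * |[
 * static gint value = 0;
 *
 * void
 * value_set (gint newval)
 * {
 *   gint old;
 *   do
 *     old = g_atomic_int_get (&value);
 *   while (!g_atomic_int_compare_and_exchange (&value, old, newval));
 * }
 * ]|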
 */

#if defined (__GNUC__)
# if defined (G_ATOMIC_I486)
/* Adapted from CVS version 1.10 of glibc's sysdeps/i386/i486/bits/atomic.h
 */
gint
g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                               gint           val)
{
  gint result;

  /* xaddl atomically exchanges %0 and *atomic, then adds; %0 holds the
   * old value afterwards */
  __asm__ __volatile__ ("lock; xaddl %0,%1"
                        : "=r" (result), "=m" (*atomic)
                        : "0" (val), "m" (*atomic));
  return result;
}

void
g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                  gint           val)
{
  __asm__ __volatile__ ("lock; addl %1,%0"
                        : "=m" (*atomic)
                        : "ir" (val), "m" (*atomic));
}

gboolean
g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
                                   gint           oldval,
                                   gint           newval)
{
  gint result;

  __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
                        : "=a" (result), "=m" (*atomic)
                        : "r" (newval), "m" (*atomic), "0" (oldval));
  return result == oldval;
}

/* The same code as above, as on i386 gpointer is 32 bit as well.
 * Duplicating the code here seems more natural than casting the
 * arguments and calling the former function */
gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  gpointer result;

  __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
                        : "=a" (result), "=m" (*atomic)
                        : "r" (newval), "m" (*atomic), "0" (oldval));
  return result == oldval;
}
# elif defined (G_ATOMIC_SPARCV9)
/* Adapted from CVS version 1.3 of glibc's sysdeps/sparc/sparc64/bits/atomic.h
 */
# define ATOMIC_INT_CMP_XCHG(atomic, oldval, newval)			\
  ({									\
     gint __result;							\
     __asm__ __volatile__ ("cas [%4], %2, %0"				\
                           : "=r" (__result), "=m" (*(atomic))		\
                           : "r" (oldval), "m" (*(atomic)), "r" (atomic),\
                           "0" (newval));				\
     __result == oldval;						\
  })

# if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  gpointer result;
  __asm__ __volatile__ ("cas [%4], %2, %0"
                        : "=r" (result), "=m" (*atomic)
                        : "r" (oldval), "m" (*atomic), "r" (atomic),
                        "0" (newval));
  return result == oldval;
}
# elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  gpointer result;
  gpointer *a = atomic;
  __asm__ __volatile__ ("casx [%4], %2, %0"
                        : "=r" (result), "=m" (*a)
                        : "r" (oldval), "m" (*a), "r" (a),
                        "0" (newval));
  return result == oldval;
}
# else /* What's that */
#  error "Your system has an unsupported pointer size"
# endif /* GLIB_SIZEOF_VOID_P */
# define G_ATOMIC_MEMORY_BARRIER					\
  __asm__ __volatile__ ("membar #LoadLoad | #LoadStore"			\
                        " | #StoreLoad | #StoreStore" : : : "memory")

# elif defined (G_ATOMIC_ALPHA)
/* Adapted from CVS version 1.3 of glibc's sysdeps/alpha/bits/atomic.h
 */
# define ATOMIC_INT_CMP_XCHG(atomic, oldval, newval)			\
  ({									\
     gint __result;							\
     gint __prev;							\
     __asm__ __volatile__ (						\
        "       mb\n"							\
        "1:     ldl_l   %0,%2\n"					\
        "       cmpeq   %0,%3,%1\n"					\
        "       beq     %1,2f\n"					\
        "       mov     %4,%1\n"					\
        "       stl_c   %1,%2\n"					\
        "       beq     %1,1b\n"					\
        "       mb\n"							\
        "2:"								\
        : "=&r" (__prev),						\
          "=&r" (__result)						\
        : "m" (*(atomic)),						\
          "Ir" (oldval),						\
          "Ir" (newval)							\
        : "memory");							\
     __result != 0;							\
  })

# if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  gint result;
  gpointer prev;
  __asm__ __volatile__ (
        "       mb\n"
        "1:     ldl_l   %0,%2\n"
        "       cmpeq   %0,%3,%1\n"
        "       beq     %1,2f\n"
        "       mov     %4,%1\n"
        "       stl_c   %1,%2\n"
        "       beq     %1,1b\n"
        "       mb\n"
        "2:"
        : "=&r" (prev),
          "=&r" (result)
        : "m" (*atomic),
          "Ir" (oldval),
          "Ir" (newval)
        : "memory");
  return result != 0;
}
# elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  gint result;
  gpointer prev;
  __asm__ __volatile__ (
        "       mb\n"
        "1:     ldq_l   %0,%2\n"
        "       cmpeq   %0,%3,%1\n"
        "       beq     %1,2f\n"
        "       mov     %4,%1\n"
        "       stq_c   %1,%2\n"
        "       beq     %1,1b\n"
        "       mb\n"
        "2:"
        : "=&r" (prev),
          "=&r" (result)
        : "m" (*atomic),
          "Ir" (oldval),
          "Ir" (newval)
        : "memory");
  return result != 0;
}
# else /* What's that */
#  error "Your system has an unsupported pointer size"
# endif /* GLIB_SIZEOF_VOID_P */
# define G_ATOMIC_MEMORY_BARRIER  __asm__ ("mb" : : : "memory")
# elif defined (G_ATOMIC_X86_64)
/* Adapted from CVS version 1.9 of glibc's sysdeps/x86_64/bits/atomic.h
 */
gint
g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                               gint           val)
{
  gint result;

  __asm__ __volatile__ ("lock; xaddl %0,%1"
                        : "=r" (result), "=m" (*atomic)
                        : "0" (val), "m" (*atomic));
  return result;
}

void
g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                  gint           val)
{
  __asm__ __volatile__ ("lock; addl %1,%0"
                        : "=m" (*atomic)
                        : "ir" (val), "m" (*atomic));
}

gboolean
g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
                                   gint           oldval,
                                   gint           newval)
{
  gint result;

  __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
                        : "=a" (result), "=m" (*atomic)
                        : "r" (newval), "m" (*atomic), "0" (oldval));
  return result == oldval;
}

gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  gpointer result;

  __asm__ __volatile__ ("lock; cmpxchgq %q2, %1"
                        : "=a" (result), "=m" (*atomic)
                        : "r" (newval), "m" (*atomic), "0" (oldval));
  return result == oldval;
}
# elif defined (G_ATOMIC_POWERPC)
/* Adapted from CVS version 1.16 of glibc's sysdeps/powerpc/bits/atomic.h
 * and CVS version 1.4 of glibc's sysdeps/powerpc/powerpc32/bits/atomic.h
 * and CVS version 1.7 of glibc's sysdeps/powerpc/powerpc64/bits/atomic.h
 */
# ifdef __OPTIMIZE__
/* A non-optimizing compile bails on the following two asm statements
 * for reasons unknown to the author */
gint
g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                               gint           val)
{
  gint result, temp;
#if ASM_NUMERIC_LABELS
  __asm__ __volatile__ ("1: lwarx   %0,0,%3\n"
                        "   add     %1,%0,%4\n"
                        "   stwcx.  %1,0,%3\n"
                        "   bne-    1b"
                        : "=&b" (result), "=&r" (temp), "=m" (*atomic)
                        : "b" (atomic), "r" (val), "m" (*atomic)
                        : "cr0", "memory");
#else
  __asm__ __volatile__ (".Lieaa%=: lwarx %0,0,%3\n"
                        "   add     %1,%0,%4\n"
                        "   stwcx.  %1,0,%3\n"
                        "   bne-    .Lieaa%="
                        : "=&b" (result), "=&r" (temp), "=m" (*atomic)
                        : "b" (atomic), "r" (val), "m" (*atomic)
                        : "cr0", "memory");
#endif
  return result;
}

/* The same as above, to save a function call repeated here */
void
g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                  gint           val)
{
  gint result, temp;
#if ASM_NUMERIC_LABELS
  __asm__ __volatile__ ("1: lwarx   %0,0,%3\n"
                        "   add     %1,%0,%4\n"
                        "   stwcx.  %1,0,%3\n"
                        "   bne-    1b"
                        : "=&b" (result), "=&r" (temp), "=m" (*atomic)
                        : "b" (atomic), "r" (val), "m" (*atomic)
                        : "cr0", "memory");
#else
  __asm__ __volatile__ (".Lia%=: lwarx %0,0,%3\n"
                        "   add     %1,%0,%4\n"
                        "   stwcx.  %1,0,%3\n"
                        "   bne-    .Lia%="
                        : "=&b" (result), "=&r" (temp), "=m" (*atomic)
                        : "b" (atomic), "r" (val), "m" (*atomic)
                        : "cr0", "memory");
#endif
}
# else /* !__OPTIMIZE__ */
gint
g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                               gint           val)
{
  gint result;
  do
    result = *atomic;
  while (!g_atomic_int_compare_and_exchange (atomic, result, result + val));

  return result;
}

void
g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                  gint           val)
{
  gint result;
  do
    result = *atomic;
  while (!g_atomic_int_compare_and_exchange (atomic, result, result + val));
}
# endif /* !__OPTIMIZE__ */

# if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
gboolean
g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
                                   gint           oldval,
                                   gint           newval)
{
  gint result;
#if ASM_NUMERIC_LABELS
  __asm__ __volatile__ ("sync\n"
                        "1: lwarx   %0,0,%1\n"
                        "   subf.   %0,%2,%0\n"
                        "   bne     2f\n"
                        "   stwcx.  %3,0,%1\n"
                        "   bne-    1b\n"
                        "2: isync"
                        : "=&r" (result)
                        : "b" (atomic), "r" (oldval), "r" (newval)
                        : "cr0", "memory");
#else
  __asm__ __volatile__ ("sync\n"
                        ".L1icae%=: lwarx %0,0,%1\n"
                        "   subf.   %0,%2,%0\n"
                        "   bne     .L2icae%=\n"
                        "   stwcx.  %3,0,%1\n"
                        "   bne-    .L1icae%=\n"
                        ".L2icae%=: isync"
                        : "=&r" (result)
                        : "b" (atomic), "r" (oldval), "r" (newval)
                        : "cr0", "memory");
#endif
  return result == 0;
}

gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  gpointer result;
#if ASM_NUMERIC_LABELS
  __asm__ __volatile__ ("sync\n"
                        "1: lwarx   %0,0,%1\n"
                        "   subf.   %0,%2,%0\n"
                        "   bne     2f\n"
                        "   stwcx.  %3,0,%1\n"
                        "   bne-    1b\n"
                        "2: isync"
                        : "=&r" (result)
                        : "b" (atomic), "r" (oldval), "r" (newval)
                        : "cr0", "memory");
#else
  __asm__ __volatile__ ("sync\n"
                        ".L1pcae%=: lwarx %0,0,%1\n"
                        "   subf.   %0,%2,%0\n"
                        "   bne     .L2pcae%=\n"
                        "   stwcx.  %3,0,%1\n"
                        "   bne-    .L1pcae%=\n"
                        ".L2pcae%=: isync"
                        : "=&r" (result)
                        : "b" (atomic), "r" (oldval), "r" (newval)
                        : "cr0", "memory");
#endif
  return result == 0;
}
# elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
gboolean
g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
                                   gint           oldval,
                                   gint           newval)
{
  gpointer result;
#if ASM_NUMERIC_LABELS
  __asm__ __volatile__ ("sync\n"
                        "1: lwarx   %0,0,%1\n"
                        "   extsw   %0,%0\n"
                        "   subf.   %0,%2,%0\n"
                        "   bne     2f\n"
                        "   stwcx.  %3,0,%1\n"
                        "   bne-    1b\n"
                        "2: isync"
                        : "=&r" (result)
                        : "b" (atomic), "r" (oldval), "r" (newval)
                        : "cr0", "memory");
#else
  __asm__ __volatile__ ("sync\n"
                        ".L1icae%=: lwarx %0,0,%1\n"
                        "   extsw   %0,%0\n"
                        "   subf.   %0,%2,%0\n"
                        "   bne     .L2icae%=\n"
                        "   stwcx.  %3,0,%1\n"
                        "   bne-    .L1icae%=\n"
                        ".L2icae%=: isync"
                        : "=&r" (result)
                        : "b" (atomic), "r" (oldval), "r" (newval)
                        : "cr0", "memory");
#endif
  return result == 0;
}

gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  gpointer result;
#if ASM_NUMERIC_LABELS
  __asm__ __volatile__ ("sync\n"
                        "1: ldarx   %0,0,%1\n"
                        "   subf.   %0,%2,%0\n"
                        "   bne     2f\n"
                        "   stdcx.  %3,0,%1\n"
                        "   bne-    1b\n"
                        "2: isync"
                        : "=&r" (result)
                        : "b" (atomic), "r" (oldval), "r" (newval)
                        : "cr0", "memory");
#else
  __asm__ __volatile__ ("sync\n"
                        ".L1pcae%=: ldarx %0,0,%1\n"
                        "   subf.   %0,%2,%0\n"
                        "   bne     .L2pcae%=\n"
                        "   stdcx.  %3,0,%1\n"
                        "   bne-    .L1pcae%=\n"
                        ".L2pcae%=: isync"
                        : "=&r" (result)
                        : "b" (atomic), "r" (oldval), "r" (newval)
                        : "cr0", "memory");
#endif
  return result == 0;
}
# else /* What's that */
#  error "Your system has an unsupported pointer size"
# endif /* GLIB_SIZEOF_VOID_P */

# define G_ATOMIC_MEMORY_BARRIER __asm__ ("sync" : : : "memory")
# elif defined (G_ATOMIC_IA64)
/* Adapted from CVS version 1.8 of glibc's sysdeps/ia64/bits/atomic.h
 */
gint
g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                               gint           val)
{
  return __sync_fetch_and_add (atomic, val);
}

void
g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                  gint           val)
{
  __sync_fetch_and_add (atomic, val);
}

gboolean
g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
                                   gint           oldval,
                                   gint           newval)
{
  return __sync_bool_compare_and_swap (atomic, oldval, newval);
}

gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  return __sync_bool_compare_and_swap ((long *)atomic,
                                       (long)oldval, (long)newval);
}

# define G_ATOMIC_MEMORY_BARRIER __sync_synchronize ()
# elif defined (G_ATOMIC_S390)
/* Adapted from glibc's sysdeps/s390/bits/atomic.h
 */
# define ATOMIC_INT_CMP_XCHG(atomic, oldval, newval)			\
  ({									\
     gint __result = oldval;						\
     __asm__ __volatile__ ("cs %0, %2, %1"				\
                           : "+d" (__result), "=Q" (*(atomic))		\
                           : "d" (newval), "m" (*(atomic)) : "cc" );	\
     __result == oldval;						\
  })

# if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  gpointer result = oldval;
  __asm__ __volatile__ ("cs %0, %2, %1"
                        : "+d" (result), "=Q" (*(atomic))
                        : "d" (newval), "m" (*(atomic)) : "cc" );
  return result == oldval;
}
# elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  gpointer result = oldval;
  gpointer *a = atomic;
  __asm__ __volatile__ ("csg %0, %2, %1"
                        : "+d" (result), "=Q" (*a)
                        : "d" ((long)(newval)), "m" (*a) : "cc" );
  return result == oldval;
}
# else /* What's that */
#  error "Your system has an unsupported pointer size"
# endif /* GLIB_SIZEOF_VOID_P */
# elif defined (G_ATOMIC_ARM)
static volatile int atomic_spin = 0;

static int atomic_spin_trylock (void)
{
  int result;

  /* swp atomically stores 1 into atomic_spin and returns its previous
   * value in result, so 0 means we took the lock */
  asm volatile (
    "swp %0, %1, [%2]\n"
    : "=&r,&r" (result)
    : "r,0" (1), "r,r" (&atomic_spin)
    : "memory");
  if (result == 0)
    return 0;
  else
    return -1;
}

static void atomic_spin_lock (void)
{
  while (atomic_spin_trylock())
    sched_yield();
}

static void atomic_spin_unlock (void)
{
  atomic_spin = 0;
}

gint
g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                               gint           val)
{
  gint result;

  atomic_spin_lock();
  result = *atomic;
  *atomic += val;
  atomic_spin_unlock();

  return result;
}

void
g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                  gint           val)
{
  atomic_spin_lock();
  *atomic += val;
  atomic_spin_unlock();
}

gboolean
g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
                                   gint           oldval,
                                   gint           newval)
{
  gboolean result;

  atomic_spin_lock();
  if (*atomic == oldval)
    {
      result = TRUE;
      *atomic = newval;
    }
  else
    result = FALSE;
  atomic_spin_unlock();

  return result;
}

gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  gboolean result;

  atomic_spin_lock();
  if (*atomic == oldval)
    {
      result = TRUE;
      *atomic = newval;
    }
  else
    result = FALSE;
  atomic_spin_unlock();

  return result;
}
# elif defined (G_ATOMIC_CRIS) || defined (G_ATOMIC_CRISV32)
# ifdef G_ATOMIC_CRIS
# define CRIS_ATOMIC_INT_CMP_XCHG(atomic, oldval, newval)		\
  ({									\
     gboolean __result;							\
     __asm__ __volatile__ ("\n"						\
                           "0:\tclearf\n\t"				\
                           "cmp.d [%[Atomic]], %[OldVal]\n\t"		\
                           "bne 1f\n\t"					\
                           "ax\n\t"					\
                           "move.d %[NewVal], [%[Atomic]]\n\t"		\
                           "bwf 0b\n"					\
                           "1:\tseq %[Result]"				\
                           : [Result] "=&r" (__result),		\
                                      "=m" (*(atomic))			\
                           : [Atomic] "r" (atomic),			\
                             [OldVal] "r" (oldval),			\
                             [NewVal] "r" (newval),			\
                                      "g" (*(gpointer*) (atomic))	\
                           : "memory");				\
     __result;								\
  })
# else
# define CRIS_ATOMIC_INT_CMP_XCHG(atomic, oldval, newval)		\
  ({									\
     gboolean __result;							\
     __asm__ __volatile__ ("\n"						\
                           "0:\tclearf p\n\t"				\
                           "cmp.d [%[Atomic]], %[OldVal]\n\t"		\
                           "bne 1f\n\t"					\
                           "ax\n\t"					\
                           "move.d %[NewVal], [%[Atomic]]\n\t"		\
                           "bcs 0b\n"					\
                           "1:\tseq %[Result]"				\
                           : [Result] "=&r" (__result),		\
                                      "=m" (*(atomic))			\
                           : [Atomic] "r" (atomic),			\
                             [OldVal] "r" (oldval),			\
                             [NewVal] "r" (newval),			\
                                      "g" (*(gpointer*) (atomic))	\
                           : "memory");				\
     __result;								\
  })
# endif

#define CRIS_CACHELINE_SIZE 32
#define CRIS_ATOMIC_BREAKS_CACHELINE(atomic) \
  (((gulong)(atomic) & (CRIS_CACHELINE_SIZE - 1)) > (CRIS_CACHELINE_SIZE - sizeof (atomic)))
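/* Worked example (illustrative): with a 32-byte cache line and a 4-byte
 * atomic, (gulong)(atomic) & 31 gives the offset within the line; offsets
 * 0..28 leave room for all four bytes, while offsets 29, 30 and 31 would
 * straddle the line boundary, so only those take the slow mutex path. */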
gint     __g_atomic_int_exchange_and_add         (volatile gint   G_GNUC_MAY_ALIAS *atomic,
                                                  gint             val);
void     __g_atomic_int_add                      (volatile gint   G_GNUC_MAY_ALIAS *atomic,
                                                  gint             val);
gboolean __g_atomic_int_compare_and_exchange     (volatile gint   G_GNUC_MAY_ALIAS *atomic,
                                                  gint             oldval,
                                                  gint             newval);
gboolean __g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                                  gpointer          oldval,
                                                  gpointer          newval);

gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  if (G_UNLIKELY (CRIS_ATOMIC_BREAKS_CACHELINE (atomic)))
    return __g_atomic_pointer_compare_and_exchange (atomic, oldval, newval);

  return CRIS_ATOMIC_INT_CMP_XCHG (atomic, oldval, newval);
}

gboolean
g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
                                   gint           oldval,
                                   gint           newval)
{
  if (G_UNLIKELY (CRIS_ATOMIC_BREAKS_CACHELINE (atomic)))
    return __g_atomic_int_compare_and_exchange (atomic, oldval, newval);

  return CRIS_ATOMIC_INT_CMP_XCHG (atomic, oldval, newval);
}

gint
g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                               gint           val)
{
  gint result;

  if (G_UNLIKELY (CRIS_ATOMIC_BREAKS_CACHELINE (atomic)))
    return __g_atomic_int_exchange_and_add (atomic, val);

  do
    result = *atomic;
  while (!CRIS_ATOMIC_INT_CMP_XCHG (atomic, result, result + val));

  return result;
}

void
g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                  gint           val)
{
  gint result;

  if (G_UNLIKELY (CRIS_ATOMIC_BREAKS_CACHELINE (atomic)))
    {
      __g_atomic_int_add (atomic, val);
      return;
    }

  do
    result = *atomic;
  while (!CRIS_ATOMIC_INT_CMP_XCHG (atomic, result, result + val));
}

/* We need the atomic mutex for atomic operations where the atomic variable
 * breaks the 32 byte cache line since the CRIS architecture does not support
 * atomic operations on such variables. Fortunately this should be rare.
 */
# define DEFINE_WITH_MUTEXES
# define g_atomic_int_exchange_and_add __g_atomic_int_exchange_and_add
# define g_atomic_int_add __g_atomic_int_add
# define g_atomic_int_compare_and_exchange __g_atomic_int_compare_and_exchange
# define g_atomic_pointer_compare_and_exchange __g_atomic_pointer_compare_and_exchange
# else /* !G_ATOMIC_* */
#  define DEFINE_WITH_MUTEXES
# endif /* G_ATOMIC_* */
#else /* !__GNUC__ */
# ifdef G_PLATFORM_WIN32
#  define DEFINE_WITH_WIN32_INTERLOCKED
# else /* !G_PLATFORM_WIN32 */
#  define DEFINE_WITH_MUTEXES
# endif /* G_PLATFORM_WIN32 */
#endif /* __GNUC__ */

#ifdef DEFINE_WITH_WIN32_INTERLOCKED
# include <windows.h>
/* The following indicates that InterlockedCompareExchangePointer is
 * declared in winbase.h (included by windows.h) and needs to be
 * commented out if that is not true. It is defined iff WINVER > 0x0400,
 * which is usually correct but can be wrong if WINVER is set before
 * windows.h is included.
 */
# if WINVER > 0x0400
#  define HAVE_INTERLOCKED_COMPARE_EXCHANGE_POINTER
# endif

gint32
g_atomic_int_exchange_and_add (volatile gint32 G_GNUC_MAY_ALIAS *atomic,
                               gint32           val)
{
  return InterlockedExchangeAdd (atomic, val);
}

void
g_atomic_int_add (volatile gint32 G_GNUC_MAY_ALIAS *atomic,
                  gint32           val)
{
  InterlockedExchangeAdd (atomic, val);
}

gboolean
g_atomic_int_compare_and_exchange (volatile gint32 G_GNUC_MAY_ALIAS *atomic,
                                   gint32           oldval,
                                   gint32           newval)
{
#ifndef HAVE_INTERLOCKED_COMPARE_EXCHANGE_POINTER
  return (guint32) InterlockedCompareExchange ((PVOID*)atomic,
                                               (PVOID)newval,
                                               (PVOID)oldval) == oldval;
#else
  return InterlockedCompareExchange (atomic,
                                     newval,
                                     oldval) == oldval;
#endif
}

gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
# ifdef HAVE_INTERLOCKED_COMPARE_EXCHANGE_POINTER
  return InterlockedCompareExchangePointer (atomic, newval, oldval) == oldval;
# else
#  if GLIB_SIZEOF_VOID_P != 4 /* no 32-bit system */
#   error "InterlockedCompareExchangePointer needed"
#  else
  return InterlockedCompareExchange (atomic, newval, oldval) == oldval;
#  endif
# endif
}
#endif /* DEFINE_WITH_WIN32_INTERLOCKED */

#ifdef DEFINE_WITH_MUTEXES
/* We have to use the slow, but safe locking method */
static GMutex *g_atomic_mutex;

/**
 * g_atomic_int_exchange_and_add:
 * @atomic: a pointer to an integer
 * @val: the value to add to *@atomic
 *
 * Atomically adds @val to the integer pointed to by @atomic.
 * It returns the value of *@atomic just before the addition
 * took place. Also acts as a memory barrier.
 *
 * Returns: the value of *@atomic before the addition.
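 *
 * For example, handing out unique ticket numbers from several threads
 * (an illustrative sketch; `next_ticket` is a hypothetical variable):
 * |[
 * static gint next_ticket = 0;
 *
 * gint
 * take_ticket (void)
 * {
 *   return g_atomic_int_exchange_and_add (&next_ticket, 1);
 * }
 * ]|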
 *
 * Since: 2.4
 */
gint
g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                               gint           val)
{
  gint result;

  g_mutex_lock (g_atomic_mutex);
  result = *atomic;
  *atomic += val;
  g_mutex_unlock (g_atomic_mutex);

  return result;
}

/**
 * g_atomic_int_add:
 * @atomic: a pointer to an integer
 * @val: the value to add to *@atomic
 *
 * Atomically adds @val to the integer pointed to by @atomic.
 * Also acts as a memory barrier.
 *
 * Since: 2.4
 */
void
g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                  gint           val)
{
  g_mutex_lock (g_atomic_mutex);
  *atomic += val;
  g_mutex_unlock (g_atomic_mutex);
}

/**
 * g_atomic_int_compare_and_exchange:
 * @atomic: a pointer to an integer
 * @oldval: the assumed old value of *@atomic
 * @newval: the new value of *@atomic
 *
 * Compares @oldval with the integer pointed to by @atomic and
 * if they are equal, atomically exchanges *@atomic with @newval.
 * Also acts as a memory barrier.
 *
 * Returns: %TRUE, if *@atomic was equal to @oldval. %FALSE otherwise.
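 *
 * For example, decrementing a counter only while it is positive (an
 * illustrative sketch; `available` is a hypothetical variable):
 * |[
 * static gint available = 8;
 *
 * gboolean
 * try_take_slot (void)
 * {
 *   gint n;
 *   do
 *     {
 *       n = g_atomic_int_get (&available);
 *       if (n == 0)
 *         return FALSE;
 *     }
 *   while (!g_atomic_int_compare_and_exchange (&available, n, n - 1));
 *   return TRUE;
 * }
 * ]|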
 *
 * Since: 2.4
 */
gboolean
g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
                                   gint           oldval,
                                   gint           newval)
{
  gboolean result;

  g_mutex_lock (g_atomic_mutex);
  if (*atomic == oldval)
    {
      result = TRUE;
      *atomic = newval;
    }
  else
    result = FALSE;
  g_mutex_unlock (g_atomic_mutex);

  return result;
}

/**
 * g_atomic_pointer_compare_and_exchange:
 * @atomic: a pointer to a #gpointer
 * @oldval: the assumed old value of *@atomic
 * @newval: the new value of *@atomic
 *
 * Compares @oldval with the pointer pointed to by @atomic and
 * if they are equal, atomically exchanges *@atomic with @newval.
 * Also acts as a memory barrier.
 *
 * Returns: %TRUE, if *@atomic was equal to @oldval. %FALSE otherwise.
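 *
 * For example, publishing a lazily created singleton so that only one
 * thread's instance wins (an illustrative sketch; create_instance() and
 * destroy_instance() are hypothetical):
 * |[
 * static gpointer instance = NULL;
 *
 * gpointer
 * get_instance (void)
 * {
 *   if (g_atomic_pointer_get (&instance) == NULL)
 *     {
 *       gpointer candidate = create_instance ();  // hypothetical
 *       if (!g_atomic_pointer_compare_and_exchange (&instance, NULL, candidate))
 *         destroy_instance (candidate);  // another thread won the race
 *     }
 *   return g_atomic_pointer_get (&instance);
 * }
 * ]|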
 *
 * Since: 2.4
 */
gboolean
g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                                       gpointer           oldval,
                                       gpointer           newval)
{
  gboolean result;

  g_mutex_lock (g_atomic_mutex);
  if (*atomic == oldval)
    {
      result = TRUE;
      *atomic = newval;
    }
  else
    result = FALSE;
  g_mutex_unlock (g_atomic_mutex);

  return result;
}

#ifdef G_ATOMIC_OP_MEMORY_BARRIER_NEEDED
/**
 * g_atomic_int_get:
 * @atomic: a pointer to an integer
 *
 * Reads the value of the integer pointed to by @atomic.
 * Also acts as a memory barrier.
 *
 * Returns: the value of *@atomic
 *
 * Since: 2.4
 */
gint
(g_atomic_int_get) (volatile gint G_GNUC_MAY_ALIAS *atomic)
{
  gint result;

  g_mutex_lock (g_atomic_mutex);
  result = *atomic;
  g_mutex_unlock (g_atomic_mutex);

  return result;
}

/**
 * g_atomic_int_set:
 * @atomic: a pointer to an integer
 * @newval: the new value
 *
 * Sets the value of the integer pointed to by @atomic.
 * Also acts as a memory barrier.
 *
 * Since: 2.10
 */
void
(g_atomic_int_set) (volatile gint G_GNUC_MAY_ALIAS *atomic,
                    gint           newval)
{
  g_mutex_lock (g_atomic_mutex);
  *atomic = newval;
  g_mutex_unlock (g_atomic_mutex);
}

/**
 * g_atomic_pointer_get:
 * @atomic: a pointer to a #gpointer.
 *
 * Reads the value of the pointer pointed to by @atomic.
 * Also acts as a memory barrier.
 *
 * Returns: the value of *@atomic
 *
 * Since: 2.4
 */
gpointer
(g_atomic_pointer_get) (volatile gpointer G_GNUC_MAY_ALIAS *atomic)
{
  gpointer result;

  g_mutex_lock (g_atomic_mutex);
  result = *atomic;
  g_mutex_unlock (g_atomic_mutex);

  return result;
}

/**
 * g_atomic_pointer_set:
 * @atomic: a pointer to a #gpointer
 * @newval: the new value
 *
 * Sets the value of the pointer pointed to by @atomic.
 * Also acts as a memory barrier.
 *
 * Since: 2.10
 */
void
(g_atomic_pointer_set) (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                        gpointer           newval)
{
  g_mutex_lock (g_atomic_mutex);
  *atomic = newval;
  g_mutex_unlock (g_atomic_mutex);
}
#endif /* G_ATOMIC_OP_MEMORY_BARRIER_NEEDED */
#elif defined (G_ATOMIC_OP_MEMORY_BARRIER_NEEDED)
gint
(g_atomic_int_get) (volatile gint G_GNUC_MAY_ALIAS *atomic)
{
  G_ATOMIC_MEMORY_BARRIER;
  return *atomic;
}

void
(g_atomic_int_set) (volatile gint G_GNUC_MAY_ALIAS *atomic,
                    gint           newval)
{
  *atomic = newval;
  G_ATOMIC_MEMORY_BARRIER;
}

gpointer
(g_atomic_pointer_get) (volatile gpointer G_GNUC_MAY_ALIAS *atomic)
{
  G_ATOMIC_MEMORY_BARRIER;
  return *atomic;
}

void
(g_atomic_pointer_set) (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                        gpointer           newval)
{
  *atomic = newval;
  G_ATOMIC_MEMORY_BARRIER;
}
#endif /* DEFINE_WITH_MUTEXES || G_ATOMIC_OP_MEMORY_BARRIER_NEEDED */

#ifdef ATOMIC_INT_CMP_XCHG
gboolean
g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
                                   gint           oldval,
                                   gint           newval)
{
  return ATOMIC_INT_CMP_XCHG (atomic, oldval, newval);
}

gint
g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                               gint           val)
{
  gint result;
  do
    result = *atomic;
  while (!ATOMIC_INT_CMP_XCHG (atomic, result, result + val));

  return result;
}

void
g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
                  gint           val)
{
  gint result;
  do
    result = *atomic;
  while (!ATOMIC_INT_CMP_XCHG (atomic, result, result + val));
}
#endif /* ATOMIC_INT_CMP_XCHG */

void
_g_atomic_thread_init (void)
{
#ifdef DEFINE_WITH_MUTEXES
  g_atomic_mutex = g_mutex_new ();
#endif /* DEFINE_WITH_MUTEXES */
}

#ifndef G_ATOMIC_OP_MEMORY_BARRIER_NEEDED
/* The parentheses around the function names keep the corresponding
 * macros in gatomic.h from expanding at the definition, so these
 * provide real, linkable symbols whose bodies simply use the macro
 * versions of the same operations. */
gint
(g_atomic_int_get) (volatile gint G_GNUC_MAY_ALIAS *atomic)
{
  return g_atomic_int_get (atomic);
}

void
(g_atomic_int_set) (volatile gint G_GNUC_MAY_ALIAS *atomic,
                    gint           newval)
{
  g_atomic_int_set (atomic, newval);
}

gpointer
(g_atomic_pointer_get) (volatile gpointer G_GNUC_MAY_ALIAS *atomic)
{
  return g_atomic_pointer_get (atomic);
}

void
(g_atomic_pointer_set) (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
                        gpointer           newval)
{
  g_atomic_pointer_set (atomic, newval);
}
#endif /* G_ATOMIC_OP_MEMORY_BARRIER_NEEDED */