1 /* GLIB - Library of useful routines for C programming
2 * Copyright (C) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald
4 * g_atomic_*: atomic operations.
5 * Copyright (C) 2003 Sebastian Wilhelmi
6 * Copyright (C) 2007 Nokia Corporation
8 * This library is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Lesser General Public
10 * License as published by the Free Software Foundation; either
11 * version 2 of the License, or (at your option) any later version.
13 * This library is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Lesser General Public License for more details.
18 * You should have received a copy of the GNU Lesser General Public
19 * License along with this library; if not, write to the
20 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
21 * Boston, MA 02111-1307, USA.
26 #if defined (G_ATOMIC_ARM)
31 #include "gthreadprivate.h"
34 * SECTION:atomic_operations
35 * @title: Atomic Operations
36 * @short_description: basic atomic integer and pointer operations
39 * The following functions can be used to atomically access integers and
40 * pointers. They are implemented as inline assembler function on most
41 * platforms and use slower fall-backs otherwise. Using them can sometimes
42 * save you from using a performance-expensive #GMutex to protect the
45 * The most important usage is reference counting. Using
46 * g_atomic_int_inc() and g_atomic_int_dec_and_test() makes reference
47 * counting a very fast operation.
49 * <note><para>You must not directly read integers or pointers concurrently
50 * accessed by multiple threads, but use the atomic accessor functions
51 * instead. That is, always use g_atomic_int_get() and g_atomic_pointer_get()
52 * for read outs. They provide the necessary synchronization mechanisms
53 * like memory barriers to access memory locations concurrently.
56 * <note><para>If you are using those functions for anything apart from
57 * simple reference counting, you should really be aware of the implications
58 * of doing that. There are literally thousands of ways to shoot yourself
59 * in the foot. So if in doubt, use a #GMutex. If you don't know what
60 * memory barriers are, do not use anything but g_atomic_int_inc() and
61 * g_atomic_int_dec_and_test().
64 * <note><para>It is not safe to set an integer or pointer just by assigning
65 * to it, when it is concurrently accessed by other threads with the following
66 * functions. Use g_atomic_int_compare_and_exchange() or
67 * g_atomic_pointer_compare_and_exchange() respectively.
71 #if defined (__GNUC__)
72 # if defined (G_ATOMIC_I486)
73 /* Adapted from CVS version 1.10 of glibc's sysdeps/i386/i486/bits/atomic.h
/* Fetch-and-add: "lock; xaddl" atomically adds val to *atomic and
 * leaves the value *atomic held just before the addition in result. */
76 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
81 __asm__ __volatile__ ("lock; xaddl %0,%1"
82 : "=r" (result), "=m" (*atomic)
83 : "0" (val), "m" (*atomic));
/* Plain atomic add, used when the previous value is not needed. */
88 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
91 __asm__ __volatile__ ("lock; addl %1,%0"
93 : "ir" (val), "m" (*atomic));
/* Compare-and-swap: "lock; cmpxchgl" stores newval into *atomic only
 * if *atomic == oldval; the value found is returned in eax ("=a"),
 * so success is detected by result == oldval. */
97 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
103 __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
104 : "=a" (result), "=m" (*atomic)
105 : "r" (newval), "m" (*atomic), "0" (oldval));
107 return result == oldval;
110 /* The same code as above, as on i386 gpointer is 32 bit as well.
111 * Duplicating the code here seems more natural than casting the
112 * arguments and calling the former function */
115 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
121 __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
122 : "=a" (result), "=m" (*atomic)
123 : "r" (newval), "m" (*atomic), "0" (oldval));
125 return result == oldval;
128 # elif defined (G_ATOMIC_SPARCV9)
129 /* Adapted from CVS version 1.3 of glibc's sysdeps/sparc/sparc64/bits/atomic.h
/* 32-bit compare-and-swap via the SPARC "cas" instruction; the macro
 * evaluates to TRUE iff the swap took place. */
131 # define ATOMIC_INT_CMP_XCHG(atomic, oldval, newval) \
134 __asm__ __volatile__ ("cas [%4], %2, %0" \
135 : "=r" (__result), "=m" (*(atomic)) \
136 : "r" (oldval), "m" (*(atomic)), "r" (atomic),\
138 __result == oldval; \
141 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
/* Pointer CAS: the 32-bit "cas" suffices when pointers are 32 bit. */
143 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
148 __asm__ __volatile__ ("cas [%4], %2, %0"
149 : "=r" (result), "=m" (*atomic)
150 : "r" (oldval), "m" (*atomic), "r" (atomic),
152 return result == oldval;
154 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
/* Pointer CAS on 64-bit: "casx" operates on a 64-bit word. */
156 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
161 gpointer *a = atomic;
162 __asm__ __volatile__ ("casx [%4], %2, %0"
163 : "=r" (result), "=m" (*a)
164 : "r" (oldval), "m" (*a), "r" (a),
166 return result == oldval;
168 # else /* What's that */
169 # error "Your system has an unsupported pointer size"
170 # endif /* GLIB_SIZEOF_VOID_P */
/* Full barrier: "membar" ordering all four load/store combinations. */
171 # define G_ATOMIC_MEMORY_BARRIER \
172 __asm__ __volatile__ ("membar #LoadLoad | #LoadStore" \
173 " | #StoreLoad | #StoreStore" : : : "memory")
175 # elif defined (G_ATOMIC_ALPHA)
176 /* Adapted from CVS version 1.3 of glibc's sysdeps/alpha/bits/atomic.h
/* Compare-and-swap; "cmpeq" tests the loaded value against oldval
 * (presumably within a load-locked/store-conditional retry sequence,
 * per the glibc source cited above -- TODO confirm). */
178 # define ATOMIC_INT_CMP_XCHG(atomic, oldval, newval) \
182 __asm__ __volatile__ ( \
185 " cmpeq %0,%3,%1\n" \
200 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
202 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
208 __asm__ __volatile__ (
226 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
228 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
234 __asm__ __volatile__ (
252 # else /* What's that */
253 # error "Your system has an unsupported pointer size"
254 # endif /* GLIB_SIZEOF_VOID_P */
/* "mb" is the Alpha full memory barrier instruction. */
255 # define G_ATOMIC_MEMORY_BARRIER __asm__ ("mb" : : : "memory")
256 # elif defined (G_ATOMIC_X86_64)
257 /* Adapted from CVS version 1.9 of glibc's sysdeps/x86_64/bits/atomic.h
/* Fetch-and-add: returns the value *atomic held before val was added. */
260 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
265 __asm__ __volatile__ ("lock; xaddl %0,%1"
266 : "=r" (result), "=m" (*atomic)
267 : "0" (val), "m" (*atomic));
/* Atomic add without returning the old value. */
272 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
275 __asm__ __volatile__ ("lock; addl %1,%0"
277 : "ir" (val), "m" (*atomic));
/* 32-bit CAS; the value found in *atomic comes back in eax ("=a"). */
281 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
287 __asm__ __volatile__ ("lock; cmpxchgl %2, %1"
288 : "=a" (result), "=m" (*atomic)
289 : "r" (newval), "m" (*atomic), "0" (oldval));
291 return result == oldval;
/* Pointer CAS: "cmpxchgq" with the %q modifier forces the 64-bit
 * register name, since gpointer is 64 bit on x86_64. */
295 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
301 __asm__ __volatile__ ("lock; cmpxchgq %q2, %1"
302 : "=a" (result), "=m" (*atomic)
303 : "r" (newval), "m" (*atomic), "0" (oldval));
305 return result == oldval;
308 # elif defined (G_ATOMIC_POWERPC)
309 /* Adapted from CVS version 1.16 of glibc's sysdeps/powerpc/bits/atomic.h
310 * and CVS version 1.4 of glibc's sysdeps/powerpc/powerpc32/bits/atomic.h
311 * and CVS version 1.7 of glibc's sysdeps/powerpc/powerpc64/bits/atomic.h
314 /* Non-optimizing compile bails on the following two asm statements
315 * for reasons unknown to the author */
/* Fetch-and-add built on lwarx (load-and-reserve); numeric vs. local
 * (".L...%=") branch labels are chosen by ASM_NUMERIC_LABELS to suit
 * the assembler in use. */
317 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
321 #if ASM_NUMERIC_LABELS
322 __asm__ __volatile__ ("1: lwarx %0,0,%3\n"
326 : "=&b" (result), "=&r" (temp), "=m" (*atomic)
327 : "b" (atomic), "r" (val), "m" (*atomic)
330 __asm__ __volatile__ (".Lieaa%=: lwarx %0,0,%3\n"
334 : "=&b" (result), "=&r" (temp), "=m" (*atomic)
335 : "b" (atomic), "r" (val), "m" (*atomic)
341 /* The same as above, to save a function call repeated here */
343 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
347 #if ASM_NUMERIC_LABELS
348 __asm__ __volatile__ ("1: lwarx %0,0,%3\n"
352 : "=&b" (result), "=&r" (temp), "=m" (*atomic)
353 : "b" (atomic), "r" (val), "m" (*atomic)
356 __asm__ __volatile__ (".Lia%=: lwarx %0,0,%3\n"
360 : "=&b" (result), "=&r" (temp), "=m" (*atomic)
361 : "b" (atomic), "r" (val), "m" (*atomic)
365 # else /* !__OPTIMIZE__ */
/* Unoptimized build: synthesize add operations as CAS retry loops
 * instead of the inline asm above. */
367 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
373 while (!g_atomic_int_compare_and_exchange (atomic, result, result + val));
379 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
385 while (!g_atomic_int_compare_and_exchange (atomic, result, result + val));
387 # endif /* !__OPTIMIZE__ */
389 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
/* 32-bit integer CAS: leading "sync" provides the barrier semantics
 * documented for these functions. */
391 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
396 #if ASM_NUMERIC_LABELS
397 __asm__ __volatile__ ("sync\n"
405 : "b" (atomic), "r" (oldval), "r" (newval)
408 __asm__ __volatile__ ("sync\n"
409 ".L1icae%=: lwarx %0,0,%1\n"
416 : "b" (atomic), "r" (oldval), "r" (newval)
/* Pointer CAS; identical to the integer case on 32-bit PowerPC. */
423 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
428 #if ASM_NUMERIC_LABELS
429 __asm__ __volatile__ ("sync\n"
437 : "b" (atomic), "r" (oldval), "r" (newval)
440 __asm__ __volatile__ ("sync\n"
441 ".L1pcae%=: lwarx %0,0,%1\n"
448 : "b" (atomic), "r" (oldval), "r" (newval)
453 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
/* 64-bit build: gint CAS still uses the 32-bit lwarx form... */
455 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
460 #if ASM_NUMERIC_LABELS
461 __asm__ __volatile__ ("sync\n"
470 : "b" (atomic), "r" (oldval), "r" (newval)
473 __asm__ __volatile__ ("sync\n"
474 ".L1icae%=: lwarx %0,0,%1\n"
482 : "b" (atomic), "r" (oldval), "r" (newval)
/* ...while the pointer CAS switches to ldarx for 64-bit loads. */
489 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
494 #if ASM_NUMERIC_LABELS
495 __asm__ __volatile__ ("sync\n"
503 : "b" (atomic), "r" (oldval), "r" (newval)
506 __asm__ __volatile__ ("sync\n"
507 ".L1pcae%=: ldarx %0,0,%1\n"
514 : "b" (atomic), "r" (oldval), "r" (newval)
519 # else /* What's that */
520 # error "Your system has an unsupported pointer size"
521 # endif /* GLIB_SIZEOF_VOID_P */
/* "sync" is the PowerPC full memory barrier. */
523 # define G_ATOMIC_MEMORY_BARRIER __asm__ ("sync" : : : "memory")
525 # elif defined (G_ATOMIC_IA64)
526 /* Adapted from CVS version 1.8 of glibc's sysdeps/ia64/bits/atomic.h
/* On IA64 GCC's __sync_* builtins are used directly; no hand-written
 * assembly is needed. */
529 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
532 return __sync_fetch_and_add (atomic, val);
536 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
539 __sync_fetch_and_add (atomic, val);
543 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
547 return __sync_bool_compare_and_swap (atomic, oldval, newval);
/* Cast through long so the builtin resolves to the pointer-sized
 * (64-bit) variant rather than a gpointer overload. */
551 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
555 return __sync_bool_compare_and_swap ((long *)atomic,
556 (long)oldval, (long)newval);
559 # define G_ATOMIC_MEMORY_BARRIER __sync_synchronize ()
560 # elif defined (G_ATOMIC_S390)
561 /* Adapted from glibc's sysdeps/s390/bits/atomic.h
/* "cs" (compare-and-swap) rewrites %0 with the current memory value
 * on failure, so "__result == oldval" reports whether the swap
 * actually happened. */
563 # define ATOMIC_INT_CMP_XCHG(atomic, oldval, newval) \
565 gint __result = oldval; \
566 __asm__ __volatile__ ("cs %0, %2, %1" \
567 : "+d" (__result), "=Q" (*(atomic)) \
568 : "d" (newval), "m" (*(atomic)) : "cc" ); \
569 __result == oldval; \
572 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
/* Pointer CAS, 32-bit: same "cs" instruction as the integer case. */
574 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
578 gpointer result = oldval;
579 __asm__ __volatile__ ("cs %0, %2, %1"
580 : "+d" (result), "=Q" (*(atomic))
581 : "d" (newval), "m" (*(atomic)) : "cc" );
582 return result == oldval;
584 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
/* Pointer CAS, 64-bit: "csg" is the 64-bit compare-and-swap. */
586 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
590 gpointer result = oldval;
591 gpointer *a = atomic;
592 __asm__ __volatile__ ("csg %0, %2, %1"
593 : "+d" (result), "=Q" (*a)
594 : "d" ((long)(newval)), "m" (*a) : "cc" );
595 return result == oldval;
597 # else /* What's that */
598 # error "Your system has an unsupported pointer size"
599 # endif /* GLIB_SIZEOF_VOID_P */
600 # elif defined (G_ATOMIC_ARM)
/* Pre-ARMv6 has no compare-and-swap instruction, so all atomic
 * operations are serialized through one global spinlock. */
601 static volatile int atomic_spin = 0;
/* Try once to take the spinlock; non-blocking. */
603 static int atomic_spin_trylock (void)
610 : "r,0" (1), "r,r" (&atomic_spin)
/* Spin until the trylock succeeds. */
618 static void atomic_spin_lock (void)
620 while (atomic_spin_trylock())
624 static void atomic_spin_unlock (void)
/* Each operation below takes the spinlock, performs the plain C
 * read-modify-write, and releases the lock. */
630 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
638 atomic_spin_unlock();
644 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
649 atomic_spin_unlock();
653 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
660 if (*atomic == oldval)
667 atomic_spin_unlock();
673 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
680 if (*atomic == oldval)
687 atomic_spin_unlock();
691 # elif defined (G_ATOMIC_CRIS) || defined (G_ATOMIC_CRISV32)
692 # ifdef G_ATOMIC_CRIS
/* CRIS compare-and-swap: compare *atomic with oldval, conditionally
 * store newval, then "seq" materializes the success flag. */
693 # define CRIS_ATOMIC_INT_CMP_XCHG(atomic, oldval, newval) \
696 __asm__ __volatile__ ("\n" \
698 "cmp.d [%[Atomic]], %[OldVal]\n\t" \
701 "move.d %[NewVal], [%[Atomic]]\n\t" \
703 "1:\tseq %[Result]" \
704 : [Result] "=&r" (__result), \
706 : [Atomic] "r" (atomic), \
707 [OldVal] "r" (oldval), \
708 [NewVal] "r" (newval), \
709 "g" (*(gpointer*) (atomic)) \
714 # define CRIS_ATOMIC_INT_CMP_XCHG(atomic, oldval, newval) \
717 __asm__ __volatile__ ("\n" \
719 "cmp.d [%[Atomic]], %[OldVal]\n\t" \
722 "move.d %[NewVal], [%[Atomic]]\n\t" \
724 "1:\tseq %[Result]" \
725 : [Result] "=&r" (__result), \
727 : [Atomic] "r" (atomic), \
728 [OldVal] "r" (oldval), \
729 [NewVal] "r" (newval), \
730 "g" (*(gpointer*) (atomic)) \
/* TRUE when the operand straddles a 32-byte cache line, which the
 * CRIS atomic sequence cannot handle (see the comment further down). */
736 #define CRIS_CACHELINE_SIZE 32
737 #define CRIS_ATOMIC_BREAKS_CACHELINE(atomic) \
738 (((gulong)(atomic) & (CRIS_CACHELINE_SIZE - 1)) > (CRIS_CACHELINE_SIZE - sizeof (atomic)))
/* Forward declarations of the mutex-protected fallbacks that the
 * DEFINE_WITH_MUTEXES section at the end of the file provides. */
740 gint __g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
742 void __g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
744 gboolean __g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
747 gboolean __g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
/* Fast path uses the inline CAS; cache-line-straddling operands are
 * routed to the mutex-based fallback instead. */
752 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
756 if (G_UNLIKELY (CRIS_ATOMIC_BREAKS_CACHELINE (atomic)))
757 return __g_atomic_pointer_compare_and_exchange (atomic, oldval, newval);
759 return CRIS_ATOMIC_INT_CMP_XCHG (atomic, oldval, newval);
763 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
767 if (G_UNLIKELY (CRIS_ATOMIC_BREAKS_CACHELINE (atomic)))
768 return __g_atomic_int_compare_and_exchange (atomic, oldval, newval);
770 return CRIS_ATOMIC_INT_CMP_XCHG (atomic, oldval, newval);
/* exchange_and_add / add are synthesized as CAS retry loops. */
774 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
779 if (G_UNLIKELY (CRIS_ATOMIC_BREAKS_CACHELINE (atomic)))
780 return __g_atomic_int_exchange_and_add (atomic, val);
784 while (!CRIS_ATOMIC_INT_CMP_XCHG (atomic, result, result + val));
790 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
795 if (G_UNLIKELY (CRIS_ATOMIC_BREAKS_CACHELINE (atomic)))
796 return __g_atomic_int_add (atomic, val);
800 while (!CRIS_ATOMIC_INT_CMP_XCHG (atomic, result, result + val));
803 /* We need the atomic mutex for atomic operations where the atomic variable
804 * breaks the 32 byte cache line since the CRIS architecture does not support
805 * atomic operations on such variables. Fortunately this should be rare.
/* Rename the public entry points so that the DEFINE_WITH_MUTEXES
 * section below emits the __g_atomic_* fallbacks referenced above. */
807 # define DEFINE_WITH_MUTEXES
808 # define g_atomic_int_exchange_and_add __g_atomic_int_exchange_and_add
809 # define g_atomic_int_add __g_atomic_int_add
810 # define g_atomic_int_compare_and_exchange __g_atomic_int_compare_and_exchange
811 # define g_atomic_pointer_compare_and_exchange __g_atomic_pointer_compare_and_exchange
/* Implementation dispatch: unknown architectures (and non-GCC
 * compilers outside Win32) fall back to the mutex-based code. */
813 # else /* !G_ATOMIC_* */
814 # define DEFINE_WITH_MUTEXES
815 # endif /* G_ATOMIC_* */
816 #else /* !__GNUC__ */
817 # ifdef G_PLATFORM_WIN32
818 # define DEFINE_WITH_WIN32_INTERLOCKED
820 # define DEFINE_WITH_MUTEXES
822 #endif /* __GNUC__ */
824 #ifdef DEFINE_WITH_WIN32_INTERLOCKED
825 # include <windows.h>
826 /* Following indicates that InterlockedCompareExchangePointer is
827 * declared in winbase.h (included by windows.h) and needs to be
828 * commented out if not true. It is defined iff WINVER > 0x0400,
829 * which is usually correct but can be wrong if WINVER is set before
830 * windows.h is included.
833 # define HAVE_INTERLOCKED_COMPARE_EXCHANGE_POINTER
/* Fetch-and-add via the Win32 Interlocked API; returns the value
 * *atomic held before the addition. */
837 g_atomic_int_exchange_and_add (volatile gint32 G_GNUC_MAY_ALIAS *atomic,
840 return InterlockedExchangeAdd (atomic, val);
/* Atomic add, discarding InterlockedExchangeAdd's return value. */
844 g_atomic_int_add (volatile gint32 G_GNUC_MAY_ALIAS *atomic,
847 InterlockedExchangeAdd (atomic, val);
/* CAS: old InterlockedCompareExchange takes PVOID operands, hence
 * the casts on the pre-HAVE_INTERLOCKED... branch. */
851 g_atomic_int_compare_and_exchange (volatile gint32 G_GNUC_MAY_ALIAS *atomic,
855 #ifndef HAVE_INTERLOCKED_COMPARE_EXCHANGE_POINTER
856 return (guint32) InterlockedCompareExchange ((PVOID*)atomic,
858 (PVOID)oldval) == oldval;
860 return InterlockedCompareExchange (atomic,
/* Pointer CAS; only valid without InterlockedCompareExchangePointer
 * when pointers are 32 bit, as the #error below enforces. */
867 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
871 # ifdef HAVE_INTERLOCKED_COMPARE_EXCHANGE_POINTER
872 return InterlockedCompareExchangePointer (atomic, newval, oldval) == oldval;
874 # if GLIB_SIZEOF_VOID_P != 4 /* no 32-bit system */
875 # error "InterlockedCompareExchangePointer needed"
877 return InterlockedCompareExchange (atomic, newval, oldval) == oldval;
881 #endif /* DEFINE_WITH_WIN32_INTERLOCKED */
883 #ifdef DEFINE_WITH_MUTEXES
884 /* We have to use the slow, but safe locking method */
/* Created in _g_atomic_thread_init(); guards every operation below. */
885 static GMutex *g_atomic_mutex;
888 * g_atomic_int_exchange_and_add:
889 * @atomic: a pointer to an integer
890 * @val: the value to add to *@atomic
892 * Atomically adds @val to the integer pointed to by @atomic.
893 * It returns the value of *@atomic just before the addition
894 * took place. Also acts as a memory barrier.
896 * Returns: the value of *@atomic before the addition.
901 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
906 g_mutex_lock (g_atomic_mutex);
909 g_mutex_unlock (g_atomic_mutex);
916 * @atomic: a pointer to an integer
917 * @val: the value to add to *@atomic
919 * Atomically adds @val to the integer pointed to by @atomic.
920 * Also acts as a memory barrier.
925 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
928 g_mutex_lock (g_atomic_mutex);
930 g_mutex_unlock (g_atomic_mutex);
934 * g_atomic_int_compare_and_exchange:
935 * @atomic: a pointer to an integer
936 * @oldval: the assumed old value of *@atomic
937 * @newval: the new value of *@atomic
939 * Compares @oldval with the integer pointed to by @atomic and
940 * if they are equal, atomically exchanges *@atomic with @newval.
941 * Also acts as a memory barrier.
943 * Returns: %TRUE, if *@atomic was equal @oldval. %FALSE otherwise.
948 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
954 g_mutex_lock (g_atomic_mutex);
955 if (*atomic == oldval)
962 g_mutex_unlock (g_atomic_mutex);
968 * g_atomic_pointer_compare_and_exchange:
969 * @atomic: a pointer to a #gpointer
970 * @oldval: the assumed old value of *@atomic
971 * @newval: the new value of *@atomic
973 * Compares @oldval with the pointer pointed to by @atomic and
974 * if they are equal, atomically exchanges *@atomic with @newval.
975 * Also acts as a memory barrier.
977 * Returns: %TRUE, if *@atomic was equal @oldval. %FALSE otherwise.
982 g_atomic_pointer_compare_and_exchange (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
988 g_mutex_lock (g_atomic_mutex);
989 if (*atomic == oldval)
996 g_mutex_unlock (g_atomic_mutex);
1001 #ifdef G_ATOMIC_OP_MEMORY_BARRIER_NEEDED
/* Mutex-based get/set accessors: the lock/unlock round-trip supplies
 * the memory-barrier behaviour the documentation promises. */
1005 * @atomic: a pointer to an integer
1007 * Reads the value of the integer pointed to by @atomic.
1008 * Also acts as a memory barrier.
1010 * Returns: the value of *@atomic
1015 (g_atomic_int_get) (volatile gint G_GNUC_MAY_ALIAS *atomic)
1019 g_mutex_lock (g_atomic_mutex);
1021 g_mutex_unlock (g_atomic_mutex);
1028 * @atomic: a pointer to an integer
1029 * @newval: the new value
1031 * Sets the value of the integer pointed to by @atomic.
1032 * Also acts as a memory barrier.
1037 (g_atomic_int_set) (volatile gint G_GNUC_MAY_ALIAS *atomic,
1040 g_mutex_lock (g_atomic_mutex);
1042 g_mutex_unlock (g_atomic_mutex);
1046 * g_atomic_pointer_get:
1047 * @atomic: a pointer to a #gpointer.
1049 * Reads the value of the pointer pointed to by @atomic.
1050 * Also acts as a memory barrier.
1052 * Returns: the value of *@atomic.
1057 (g_atomic_pointer_get) (volatile gpointer G_GNUC_MAY_ALIAS *atomic)
1061 g_mutex_lock (g_atomic_mutex);
1063 g_mutex_unlock (g_atomic_mutex);
1069 * g_atomic_pointer_set:
1070 * @atomic: a pointer to a #gpointer
1071 * @newval: the new value
1073 * Sets the value of the pointer pointed to by @atomic.
1074 * Also acts as a memory barrier.
1079 (g_atomic_pointer_set) (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
1082 g_mutex_lock (g_atomic_mutex);
1084 g_mutex_unlock (g_atomic_mutex);
1086 #endif /* G_ATOMIC_OP_MEMORY_BARRIER_NEEDED */
1087 #elif defined (G_ATOMIC_OP_MEMORY_BARRIER_NEEDED)
/* Weakly ordered machines with native atomics: bracket the plain
 * load/store with G_ATOMIC_MEMORY_BARRIER so these accessors keep
 * their documented barrier semantics. */
1089 (g_atomic_int_get) (volatile gint G_GNUC_MAY_ALIAS *atomic)
1091 G_ATOMIC_MEMORY_BARRIER;
1096 (g_atomic_int_set) (volatile gint G_GNUC_MAY_ALIAS *atomic,
1100 G_ATOMIC_MEMORY_BARRIER;
1104 (g_atomic_pointer_get) (volatile gpointer G_GNUC_MAY_ALIAS *atomic)
1106 G_ATOMIC_MEMORY_BARRIER;
1111 (g_atomic_pointer_set) (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
1115 G_ATOMIC_MEMORY_BARRIER;
1117 #endif /* DEFINE_WITH_MUTEXES || G_ATOMIC_OP_MEMORY_BARRIER_NEEDED */
1119 #ifdef ATOMIC_INT_CMP_XCHG
/* Architectures that only supplied an ATOMIC_INT_CMP_XCHG macro get
 * the integer operations synthesized here: compare_and_exchange maps
 * directly, while exchange_and_add / add are CAS retry loops. */
1121 g_atomic_int_compare_and_exchange (volatile gint G_GNUC_MAY_ALIAS *atomic,
1125 return ATOMIC_INT_CMP_XCHG (atomic, oldval, newval);
1129 g_atomic_int_exchange_and_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
1135 while (!ATOMIC_INT_CMP_XCHG (atomic, result, result + val));
1141 g_atomic_int_add (volatile gint G_GNUC_MAY_ALIAS *atomic,
1147 while (!ATOMIC_INT_CMP_XCHG (atomic, result, result + val));
1149 #endif /* ATOMIC_INT_CMP_XCHG */
/* One-time initialization hook (presumably run during GLib's thread
 * system setup, see gthreadprivate.h -- TODO confirm); creates the
 * fallback mutex when the mutex-based implementation is in use. */
1152 _g_atomic_thread_init (void)
1154 #ifdef DEFINE_WITH_MUTEXES
1155 g_atomic_mutex = g_mutex_new ();
1156 #endif /* DEFINE_WITH_MUTEXES */
/* No barrier needed on this platform: the function versions of the
 * accessors (names parenthesized to defeat macro expansion) simply
 * forward to the corresponding macro implementations. */
1159 #ifndef G_ATOMIC_OP_MEMORY_BARRIER_NEEDED
1161 (g_atomic_int_get) (volatile gint G_GNUC_MAY_ALIAS *atomic)
1163 return g_atomic_int_get (atomic);
1167 (g_atomic_int_set) (volatile gint G_GNUC_MAY_ALIAS *atomic,
1170 g_atomic_int_set (atomic, newval);
1174 (g_atomic_pointer_get) (volatile gpointer G_GNUC_MAY_ALIAS *atomic)
1176 return g_atomic_pointer_get (atomic);
1180 (g_atomic_pointer_set) (volatile gpointer G_GNUC_MAY_ALIAS *atomic,
1183 g_atomic_pointer_set (atomic, newval);
1185 #endif /* G_ATOMIC_OP_MEMORY_BARRIER_NEEDED */