1 /* GLIB - Library of useful routines for C programming
2 * Copyright (C) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald
4 * g_atomic_*: atomic operations.
5 * Copyright (C) 2003 Sebastian Wilhelmi
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
20 * Boston, MA 02111-1307, USA.
/* NOTE(review): excerpt view — the leading numbers on each line are the
 * file's original line numbers, and the gaps show that declarations,
 * braces and some operands are elided here. Comments below describe only
 * the visible instructions. */
27 #if defined (__GNUC__)
28 # if defined (G_ATOMIC_I486)
29 /* Adapted from CVS version 1.10 of glibc's sysdeps/i386/i486/bits/atomic.h
/* Atomically add `val` to *atomic and return the value it held BEFORE the
 * add: x86 `lock; xadd` swaps the register with memory while adding, so the
 * old memory value lands in `result` (operand %0, seeded via "0"(val)). */
32 g_atomic_int_exchange_and_add (gint *atomic,
37 __asm__ __volatile__ ("lock; xaddl %0,%1"
38 : "=r" (result), "=m" (*atomic)
39 : "0" (val), "m" (*atomic));
/* Atomically add `val` to *atomic, discarding the old value — a plain
 * lock-prefixed `add` suffices when the previous value is not needed. */
44 g_atomic_int_add (gint *atomic,
47 __asm__ __volatile__ ("lock; addl %1,%0"
49 : "ir" (val), "m" (*atomic));
/* Atomic compare-and-swap on a gint. `cmpxchg` compares EAX (seeded with
 * `oldval` via the "0"/"=a" tie) against *atomic, stores `newval` on match,
 * and always leaves the prior memory value in EAX; equality with `oldval`
 * therefore signals success. */
53 g_atomic_int_compare_and_exchange (gint *atomic,
59 __asm __volatile ("lock; cmpxchgl %2, %1"
60 : "=a" (result), "=m" (*atomic)
61 : "r" (newval), "m" (*atomic), "0" (oldval));
63 return result == oldval;
66 /* The same code as above, as on i386 gpointer is 32 bit as well.
67 * Duplicating the code here seems more natural than casting the
68 * arguments and calling the former function */
/* Pointer-sized CAS; identical `cmpxchgl` sequence since sizeof(gpointer)
 * == 4 on i386. */
71 g_atomic_pointer_compare_and_exchange (gpointer *atomic,
77 __asm __volatile ("lock; cmpxchgl %2, %1"
78 : "=a" (result), "=m" (*atomic)
79 : "r" (newval), "m" (*atomic), "0" (oldval));
81 return result == oldval;
84 # elif defined (G_ATOMIC_SPARCV9)
85 /* Adapted from CVS version 1.3 of glibc's sysdeps/sparc/sparc64/bits/atomic.h
/* Statement-expression macro wrapping SPARC `cas` (compare-and-swap on a
 * 32-bit cell): compares %2 (oldval) with [atomic], conditionally stores,
 * and leaves the prior memory value in __result. Trailing lines of the
 * macro (result test, closing braces) are elided from this view. */
87 # define ATOMIC_INT_CMP_XCHG(atomic, oldval, newval) \
90 __asm __volatile ("cas [%4], %2, %0" \
91 : "=r" (__result), "=m" (*(atomic)) \
92 : "r" (oldval), "m" (*(atomic)), "r" (atomic), \
97 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
/* Pointer CAS using 32-bit `cas` — valid only because pointers are 4 bytes
 * in this branch; success is signalled by the old value matching `oldval`. */
99 g_atomic_pointer_compare_and_exchange (gpointer *atomic,
104 __asm __volatile ("cas [%4], %2, %0"
105 : "=r" (result), "=m" (*atomic)
106 : "r" (oldval), "m" (*atomic), "r" (atomic),
108 return result == oldval;
110 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
/* 64-bit variant: `casx` operates on a doubleword, matching 8-byte
 * pointers. */
112 g_atomic_pointer_compare_and_exchange (gpointer *atomic,
117 gpointer *a = atomic;
118 __asm __volatile ("casx [%4], %2, %0"
119 : "=r" (result), "=m" (*a)
120 : "r" (oldval), "m" (*a), "r" (a),
124 # else /* What's that */
125 # error "Your system has an unsupported pointer size"
126 # endif /* GLIB_SIZEOF_VOID_P */
/* Full fence: membar ordering all four load/store combinations; "memory"
 * clobber stops the compiler reordering across it. */
127 # define G_ATOMIC_MEMORY_BARRIER \
128 __asm __volatile ("membar #LoadLoad | #LoadStore" \
129 " | #StoreLoad | #StoreStore" : : : "memory")
131 # elif defined (G_ATOMIC_ALPHA)
132 /* Adapted from CVS version 1.3 of glibc's sysdeps/alpha/bits/atomic.h
/* CAS macro for Alpha. Only the `cmpeq` (compare old value against
 * expected) line is visible here; the surrounding ldl_l/stl_c
 * load-locked/store-conditional loop is elided from this view —
 * TODO(review): confirm against the full file before relying on details. */
134 # define ATOMIC_INT_CMP_XCHG(atomic, oldval, newval) \
138 __asm__ __volatile__ ( \
141 " cmpeq %0,%3,%1\n" \
156 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
/* Pointer CAS, 32-bit branch; asm body elided from this view. */
158 g_atomic_pointer_compare_and_exchange (gpointer *atomic,
164 __asm__ __volatile__ (
182 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
/* Pointer CAS, 64-bit branch; asm body elided from this view. */
184 g_atomic_pointer_compare_and_exchange (gpointer *atomic,
190 __asm__ __volatile__ (
208 # else /* What's that */
209 # error "Your system has an unsupported pointer size"
210 # endif /* GLIB_SIZEOF_VOID_P */
/* Alpha `mb` = full memory barrier. */
211 # define G_ATOMIC_MEMORY_BARRIER __asm ("mb" : : : "memory")
212 # elif defined (G_ATOMIC_X86_64)
213 /* Adapted from CVS version 1.9 of glibc's sysdeps/x86_64/bits/atomic.h
/* Fetch-and-add: `lock; xaddl` atomically adds and returns the old 32-bit
 * value (gint stays 32-bit on x86-64, hence the `l` suffix). */
216 g_atomic_int_exchange_and_add (gint *atomic,
221 __asm__ __volatile__ ("lock; xaddl %0,%1"
222 : "=r" (result), "=m" (*atomic)
223 : "0" (val), "m" (*atomic));
/* Atomic add with the result discarded. */
228 g_atomic_int_add (gint *atomic,
231 __asm__ __volatile__ ("lock; addl %1,%0"
233 : "ir" (val), "m" (*atomic));
/* 32-bit CAS — same EAX-tied `cmpxchgl` pattern as the i486 branch. */
237 g_atomic_int_compare_and_exchange (gint *atomic,
243 __asm __volatile ("lock; cmpxchgl %2, %1"
244 : "=a" (result), "=m" (*atomic)
245 : "r" (newval), "m" (*atomic), "0" (oldval));
247 return result == oldval;
/* Pointer CAS: `cmpxchgq` with the %q modifier forcing the 64-bit register
 * name, since pointers are 8 bytes here. */
251 g_atomic_pointer_compare_and_exchange (gpointer *atomic,
257 __asm __volatile ("lock; cmpxchgq %q2, %1"
258 : "=a" (result), "=m" (*atomic)
259 : "r" (newval), "m" (*atomic), "0" (oldval));
261 return result == oldval;
264 # elif defined (G_ATOMIC_POWERPC)
265 /* Adapted from CVS version 1.12 of glibc's sysdeps/powerpc/bits/atomic.h
266 * and CVS version 1.3 of glibc's sysdeps/powerpc/powerpc32/bits/atomic.h
267 * and CVS version 1.2 of glibc's sysdeps/powerpc/powerpc64/bits/atomic.h
/* Fetch-and-add via a lwarx (load-and-reserve) retry loop; the matching
 * stwcx./branch lines are elided from this view. "=&b"/"=&r" earlyclobbers
 * keep result/temp out of the input registers. */
270 g_atomic_int_exchange_and_add (gint *atomic,
274 __asm __volatile ("1: lwarx %0,0,%3\n"
278 : "=&b" (result), "=&r" (temp), "=m" (*atomic)
279 : "b" (atomic), "r" (val), "2" (*atomic)
284 /* The same as above, to save a function call repeated here */
/* Atomic add, duplicated rather than delegated (per the comment above). */
286 g_atomic_int_add (gint *atomic,
290 __asm __volatile ("1: lwarx %0,0,%3\n"
294 : "=&b" (result), "=&r" (temp), "=m" (*atomic)
295 : "b" (atomic), "r" (val), "2" (*atomic)
299 # if GLIB_SIZEOF_VOID_P == 4 /* 32-bit system */
/* 32-bit CAS: leading `sync` orders prior accesses; the reservation loop
 * itself is elided from this view. */
301 g_atomic_int_compare_and_exchange (gint *atomic,
306 __asm __volatile ("sync\n"
314 : "b" (atomic), "r" (oldval), "r" (newval)
/* Pointer CAS, 32-bit branch (pointers are word-sized here). */
320 g_atomic_pointer_compare_and_exchange (gpointer *atomic,
325 __asm __volatile ("sync\n"
333 : "b" (atomic), "r" (oldval), "r" (newval)
337 # elif GLIB_SIZEOF_VOID_P == 8 /* 64-bit system */
/* 64-bit builds: gint CAS still operates on a 32-bit cell. */
339 g_atomic_int_compare_and_exchange (gint *atomic,
344 __asm __volatile ("sync\n"
353 : "b" (atomic), "r" (oldval), "r" (newval)
/* Pointer CAS on a doubleword (presumably ldarx/stdcx. in the elided
 * body — confirm against the full file). */
359 g_atomic_pointer_compare_and_exchange (gpointer *atomic,
364 __asm __volatile ("sync\n"
372 : "b" (atomic), "r" (oldval), "r" (newval)
376 # else /* What's that */
377 # error "Your system has an unsupported pointer size"
378 # endif /* GLIB_SIZEOF_VOID_P */
/* `sync` = PowerPC full memory barrier. */
380 # define G_ATOMIC_MEMORY_BARRIER __asm ("sync" : : : "memory")
382 # elif defined (G_ATOMIC_IA64)
383 /* Adapted from CVS version 1.8 of glibc's sysdeps/ia64/bits/atomic.h
/* IA-64 delegates to the compiler's typed __sync_* builtins instead of
 * hand-written asm: `_si` = 4-byte (single int), `_di` = 8-byte variants. */
386 g_atomic_int_exchange_and_add (gint *atomic,
389 return __sync_fetch_and_add_si (atomic, val);
/* Same builtin as above, return value discarded. */
393 g_atomic_int_add (gint *atomic,
396 __sync_fetch_and_add_si (atomic, val);
/* 32-bit CAS via builtin; returns nonzero on successful swap. */
400 g_atomic_int_compare_and_exchange (gint *atomic,
404 return __sync_bool_compare_and_swap_si (atomic, oldval, newval);
/* Pointer CAS through the 8-byte builtin; the long casts rely on
 * sizeof(long) == sizeof(gpointer) == 8 on IA-64. */
408 g_atomic_pointer_compare_and_exchange (gpointer *atomic,
412 return __sync_bool_compare_and_swap_di ((long *)atomic,
413 (long)oldval, (long)newval);
416 # define G_ATOMIC_MEMORY_BARRIER __sync_synchronize ()
/* Fallback selection: GCC on an unrecognized CPU uses the mutex-based
 * implementation; non-GCC compilers use Win32 Interlocked ops on Windows
 * and mutexes elsewhere (the non-Win32 #else line is elided here). */
417 # else /* !G_ATOMIC */
418 # define DEFINE_WITH_MUTEXES
419 # endif /* G_ATOMIC */
420 #else /* !__GNUC__ */
421 # ifdef G_PLATFORM_WIN32
422 # define DEFINE_WITH_WIN32_INTERLOCKED
424 # define DEFINE_WITH_MUTEXES
426 #endif /* __GNUC__ */
428 #ifdef DEFINE_WITH_WIN32_INTERLOCKED
429 # include <windows.h>
/* NOTE(review): this branch declares the int ops with gint32 parameters
 * while every other branch uses gint — confirm the header declarations
 * match, as gint/gint32 are distinct typedefs even when both are 32-bit. */
/* Fetch-and-add via the Win32 API; returns the pre-add value. */
431 g_atomic_int_exchange_and_add (gint32 *atomic,
434 return InterlockedExchangeAdd (atomic, val);
/* Same API call with the old value discarded. */
438 g_atomic_int_add (gint32 *atomic,
441 InterlockedExchangeAdd (atomic, val);
/* CAS through InterlockedCompareExchange; the PVOID casts target the
 * pre-Win98 pointer-typed signature. The middle argument (newval) is on
 * an elided line — verify the cast set against the full file. */
445 g_atomic_int_compare_and_exchange (gint32 *atomic,
449 return (guint32)InterlockedCompareExchange ((PVOID*)atomic,
451 (PVOID)oldval) == oldval;
/* Pointer CAS; guarded to 32-bit targets only, where a pointer fits the
 * LONG-sized exchange — 64-bit Windows would need
 * InterlockedCompareExchangePointer, as the #error notes. */
455 g_atomic_pointer_compare_and_exchange (gpointer *atomic,
459 # if GLIB_SIZEOF_VOID_P != 4 /* no 32-bit system */
460 # error "InterlockedCompareExchangePointer needed"
462 return InterlockedCompareExchange (atomic, newval, oldval) == oldval;
465 #endif /* DEFINE_WITH_WIN32_INTERLOCKED */
467 #ifdef DEFINE_WITH_MUTEXES
468 /* We have to use the slow, but safe locking method */
/* One process-wide lock serializes ALL atomic ops in this fallback —
 * correct, but a contention point. */
469 G_LOCK_DEFINE_STATIC (g_atomic_lock);
/* Fetch-and-add under the global lock (read/modify lines elided here). */
471 g_atomic_int_exchange_and_add (gint *atomic,
476 G_LOCK (g_atomic_lock);
479 G_UNLOCK (g_atomic_lock);
/* Plain add under the global lock. */
486 g_atomic_int_add (gint *atomic,
489 G_LOCK (g_atomic_lock);
491 G_UNLOCK (g_atomic_lock);
/* CAS: compare under the lock, store newval only on match (store and
 * result lines elided here). */
495 g_atomic_int_compare_and_exchange (gint *atomic,
501 G_LOCK (g_atomic_lock);
502 if (*atomic == oldval)
509 G_UNLOCK (g_atomic_lock);
/* Pointer CAS, same lock-compare-store shape as above. */
515 g_atomic_pointer_compare_and_exchange (gpointer *atomic,
521 G_LOCK (g_atomic_lock);
522 if (*atomic == oldval)
529 G_UNLOCK (g_atomic_lock);
534 #ifdef G_ATOMIC_OP_MEMORY_BARRIER_NEEDED
/* On weakly-ordered machines even a read needs the lock so it synchronizes
 * with writers. */
536 g_atomic_int_get (gint *atomic)
540 G_LOCK (g_atomic_lock);
542 G_UNLOCK (g_atomic_lock);
/* Pointer read under the lock, same rationale as above. */
548 g_atomic_pointer_get (gpointer *atomic)
552 G_LOCK (g_atomic_lock);
554 G_UNLOCK (g_atomic_lock);
558 #endif /* G_ATOMIC_OP_MEMORY_BARRIER_NEEDED */
559 #elif defined (G_ATOMIC_OP_MEMORY_BARRIER_NEEDED)
/* Lock-free branch for weakly-ordered CPUs: read the value, then issue the
 * architecture barrier defined above so the load is not reordered past
 * later accesses. */
561 g_atomic_int_get (gint *atomic)
563 gint result = *atomic;
565 G_ATOMIC_MEMORY_BARRIER;
/* Same load-then-fence pattern for pointer-sized values. */
571 g_atomic_pointer_get (gpointer *atomic)
573 gpointer result = *atomic;
575 G_ATOMIC_MEMORY_BARRIER;
579 #endif /* DEFINE_WITH_MUTEXES || G_ATOMIC_OP_MEMORY_BARRIER_NEEDED */
581 #ifdef ATOMIC_INT_CMP_XCHG
/* Architectures that only supplied the CAS macro (SPARC, Alpha) get the
 * remaining int ops built on top of it here. */
583 g_atomic_int_compare_and_exchange (gint *atomic,
587 return ATOMIC_INT_CMP_XCHG (atomic, oldval, newval);
/* Fetch-and-add as a CAS retry loop: reread and retry until the swap from
 * `result` to `result + val` succeeds (the read of *atomic into `result`
 * inside the loop is on elided lines). */
591 g_atomic_int_exchange_and_add (gint *atomic,
597 while (!ATOMIC_INT_CMP_XCHG (atomic, result, result + val));
/* Same CAS loop with the old value discarded. */
603 g_atomic_int_add (gint *atomic,
609 while (!ATOMIC_INT_CMP_XCHG (atomic, result, result + val));
611 #endif /* ATOMIC_INT_CMP_XCHG */