2 * Copyright © 2011 Ryan Lortie
4 * SPDX-License-Identifier: LGPL-2.1-or-later
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2.1 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful, but
12 * WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19 * Author: Ryan Lortie <desrt@desrt.ca>
22 #ifndef __G_ATOMIC_H__
23 #define __G_ATOMIC_H__
25 #if !defined (__GLIB_H_INSIDE__) && !defined (GLIB_COMPILATION)
26 #error "Only <glib.h> can be included directly."
29 #include <glib/gtypes.h>
30 #include <glib/glib-typeof.h>
/* Prototypes for the out-of-line (function call) forms of the atomic
 * operations. NOTE(review): availability macros and several prototype
 * continuation lines (second parameters) appear to be missing from this
 * extract — verify against the full gatomic.h. */
35 gint g_atomic_int_get (const volatile gint *atomic);
37 void g_atomic_int_set (volatile gint *atomic,
40 void g_atomic_int_inc (volatile gint *atomic);
42 gboolean g_atomic_int_dec_and_test (volatile gint *atomic);
44 gboolean g_atomic_int_compare_and_exchange (volatile gint *atomic,
47 GLIB_AVAILABLE_IN_2_74
48 gboolean g_atomic_int_compare_and_exchange_full (gint *atomic,
52 GLIB_AVAILABLE_IN_2_74
53 gint g_atomic_int_exchange (gint *atomic,
56 gint g_atomic_int_add (volatile gint *atomic,
58 GLIB_AVAILABLE_IN_2_30
59 guint g_atomic_int_and (volatile guint *atomic,
61 GLIB_AVAILABLE_IN_2_30
62 guint g_atomic_int_or (volatile guint *atomic,
65 guint g_atomic_int_xor (volatile guint *atomic,
/* Atomic pointer operations. */
69 gpointer g_atomic_pointer_get (const volatile void *atomic);
71 void g_atomic_pointer_set (volatile void *atomic,
74 gboolean g_atomic_pointer_compare_and_exchange (volatile void *atomic,
77 GLIB_AVAILABLE_IN_2_74
78 gboolean g_atomic_pointer_compare_and_exchange_full (void *atomic,
82 GLIB_AVAILABLE_IN_2_74
83 gpointer g_atomic_pointer_exchange (void *atomic,
86 gintptr g_atomic_pointer_add (volatile void *atomic,
88 GLIB_AVAILABLE_IN_2_30
89 guintptr g_atomic_pointer_and (volatile void *atomic,
91 GLIB_AVAILABLE_IN_2_30
92 guintptr g_atomic_pointer_or (volatile void *atomic,
95 guintptr g_atomic_pointer_xor (volatile void *atomic,
/* Deprecated: superseded by g_atomic_int_add(). */
98 GLIB_DEPRECATED_IN_2_30_FOR(g_atomic_int_add)
99 gint g_atomic_int_exchange_and_add (volatile gint *atomic,
104 #if defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)
106 /* We prefer the new C11-style atomic extension of GCC if available */
107 #if defined(__ATOMIC_SEQ_CST)
/* All macros below use the C11-style __atomic_* builtins with
 * __ATOMIC_SEQ_CST (sequentially consistent) ordering. The
 * "(void) (0 ? ... : 1)" lines are compile-time type checks only — they
 * are never evaluated at runtime. NOTE(review): some continuation lines
 * of these statement-expression macros (e.g. temporary declarations and
 * the closing "}))") appear to be missing from this extract. */
109 #define g_atomic_int_get(atomic) \
110 (G_GNUC_EXTENSION ({ \
111 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
113 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
114 __atomic_load ((gint *)(atomic), &gaig_temp, __ATOMIC_SEQ_CST); \
117 #define g_atomic_int_set(atomic, newval) \
118 (G_GNUC_EXTENSION ({ \
119 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
120 gint gais_temp = (gint) (newval); \
121 (void) (0 ? *(atomic) ^ (newval) : 1); \
122 __atomic_store ((gint *)(atomic), &gais_temp, __ATOMIC_SEQ_CST); \
125 #if defined(glib_typeof)
/* With glib_typeof available, the pointer macros preserve the pointee
 * type of the caller's variable instead of collapsing to gpointer. */
126 #define g_atomic_pointer_get(atomic) \
127 (G_GNUC_EXTENSION ({ \
128 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
129 glib_typeof (*(atomic)) gapg_temp_newval; \
130 glib_typeof ((atomic)) gapg_temp_atomic = (atomic); \
131 __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \
134 #define g_atomic_pointer_set(atomic, newval) \
135 (G_GNUC_EXTENSION ({ \
136 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
137 glib_typeof ((atomic)) gaps_temp_atomic = (atomic); \
138 glib_typeof (*(atomic)) gaps_temp_newval = (newval); \
139 (void) (0 ? (gpointer) * (atomic) : NULL); \
140 __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST); \
142 #else /* if !(defined(glib_typeof) */
143 #define g_atomic_pointer_get(atomic) \
144 (G_GNUC_EXTENSION ({ \
145 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
146 gpointer gapg_temp_newval; \
147 gpointer *gapg_temp_atomic = (gpointer *)(atomic); \
148 __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \
151 #define g_atomic_pointer_set(atomic, newval) \
152 (G_GNUC_EXTENSION ({ \
153 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
154 gpointer *gaps_temp_atomic = (gpointer *)(atomic); \
155 gpointer gaps_temp_newval = (gpointer)(newval); \
156 (void) (0 ? (gpointer) *(atomic) : NULL); \
157 __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST); \
159 #endif /* if defined(glib_typeof) */
/* Atomic increment, decrement-and-test, compare-and-exchange and exchange
 * for gint, built on the C11-style __atomic builtins (seq-cst ordering). */
161 #define g_atomic_int_inc(atomic) \
162 (G_GNUC_EXTENSION ({ \
163 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
164 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
165 (void) __atomic_fetch_add ((atomic), 1, __ATOMIC_SEQ_CST); \
167 #define g_atomic_int_dec_and_test(atomic) \
168 (G_GNUC_EXTENSION ({ \
169 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
170 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
171 __atomic_fetch_sub ((atomic), 1, __ATOMIC_SEQ_CST) == 1; \
173 #if defined(glib_typeof) && defined(G_CXX_STD_VERSION)
174 /* See comments below about equivalent g_atomic_pointer_compare_and_exchange()
175 * shenanigans for type-safety when compiling in C++ mode. */
176 #define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
177 (G_GNUC_EXTENSION ({ \
178 glib_typeof (*(atomic)) gaicae_oldval = (oldval); \
179 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
180 (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
181 __atomic_compare_exchange_n ((atomic), &gaicae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
183 #else /* if !(defined(glib_typeof) && defined(G_CXX_STD_VERSION)) */
184 #define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
185 (G_GNUC_EXTENSION ({ \
186 gint gaicae_oldval = (oldval); \
187 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
188 (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
189 __atomic_compare_exchange_n ((atomic), (void *) (&(gaicae_oldval)), (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
191 #endif /* defined(glib_typeof) */
/* The _full variant passes preval as the "expected" slot, so after the call
 * *preval holds the value that was observed in *atomic. */
192 #define g_atomic_int_compare_and_exchange_full(atomic, oldval, newval, preval) \
193 (G_GNUC_EXTENSION ({ \
194 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
195 G_STATIC_ASSERT (sizeof *(preval) == sizeof (gint)); \
196 (void) (0 ? *(atomic) ^ (newval) ^ (oldval) ^ *(preval) : 1); \
197 *(preval) = (oldval); \
198 __atomic_compare_exchange_n ((atomic), (preval), (newval), FALSE, \
199 __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) \
202 #define g_atomic_int_exchange(atomic, newval) /* returns the previously stored value */ \
203 (G_GNUC_EXTENSION ({ \
204 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
205 (void) (0 ? *(atomic) ^ (newval) : 1); \
206 (gint) __atomic_exchange_n ((atomic), (newval), __ATOMIC_SEQ_CST); \
208 #define g_atomic_int_add(atomic, val) /* fetch-then-add: returns the value before the add */ \
209 (G_GNUC_EXTENSION ({ \
210 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
211 (void) (0 ? *(atomic) ^ (val) : 1); \
212 (gint) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST); \
214 #define g_atomic_int_and(atomic, val) /* fetch-then-AND: returns the old value as guint */ \
215 (G_GNUC_EXTENSION ({ \
216 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
217 (void) (0 ? *(atomic) ^ (val) : 1); \
218 (guint) __atomic_fetch_and ((atomic), (val), __ATOMIC_SEQ_CST); \
220 #define g_atomic_int_or(atomic, val) /* fetch-then-OR: returns the old value as guint */ \
221 (G_GNUC_EXTENSION ({ \
222 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
223 (void) (0 ? *(atomic) ^ (val) : 1); \
224 (guint) __atomic_fetch_or ((atomic), (val), __ATOMIC_SEQ_CST); \
226 #define g_atomic_int_xor(atomic, val) /* fetch-then-XOR: returns the old value as guint */ \
227 (G_GNUC_EXTENSION ({ \
228 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
229 (void) (0 ? *(atomic) ^ (val) : 1); \
230 (guint) __atomic_fetch_xor ((atomic), (val), __ATOMIC_SEQ_CST); \
233 #if defined(glib_typeof) && defined(G_CXX_STD_VERSION)
234 /* This is typesafe because we check we can assign oldval to the type of
235 * (*atomic). Unfortunately it can only be done in C++ because gcc/clang warn
236 * when atomic is volatile and not oldval, or when atomic is gsize* and oldval
237 * is NULL. Note that clang++ force us to be typesafe because it is an error if the 2nd
238 * argument of __atomic_compare_exchange_n() has a different type than the
240 * https://gitlab.gnome.org/GNOME/glib/-/merge_requests/1919
241 * https://gitlab.gnome.org/GNOME/glib/-/merge_requests/1715#note_1024120. */
242 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
243 (G_GNUC_EXTENSION ({ \
244 G_STATIC_ASSERT (sizeof (static_cast<glib_typeof (*(atomic))>((oldval))) \
245 == sizeof (gpointer)); \
246 glib_typeof (*(atomic)) gapcae_oldval = (oldval); \
247 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
248 (void) (0 ? (gpointer) *(atomic) : NULL); \
249 __atomic_compare_exchange_n ((atomic), &gapcae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
251 #else /* if !(defined(glib_typeof) && defined(G_CXX_STD_VERSION) */
252 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
253 (G_GNUC_EXTENSION ({ \
254 G_STATIC_ASSERT (sizeof (oldval) == sizeof (gpointer)); \
255 gpointer gapcae_oldval = (gpointer)(oldval); \
256 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
257 (void) (0 ? (gpointer) *(atomic) : NULL); \
258 __atomic_compare_exchange_n ((atomic), (void *) (&(gapcae_oldval)), (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
260 #endif /* defined(glib_typeof) */
/* The _full variant passes preval as the "expected" slot, so after the call
 * *preval holds the value that was observed in *atomic. */
261 #define g_atomic_pointer_compare_and_exchange_full(atomic, oldval, newval, preval) \
262 (G_GNUC_EXTENSION ({ \
263 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
264 G_STATIC_ASSERT (sizeof *(preval) == sizeof (gpointer)); \
265 (void) (0 ? (gpointer) *(atomic) : NULL); \
266 (void) (0 ? (gpointer) *(preval) : NULL); \
267 *(preval) = (oldval); \
268 __atomic_compare_exchange_n ((atomic), (preval), (newval), FALSE, \
269 __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? \
272 #define g_atomic_pointer_exchange(atomic, newval) /* returns the previously stored pointer */ \
273 (G_GNUC_EXTENSION ({ \
274 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
275 (void) (0 ? (gpointer) *(atomic) : NULL); \
276 (gpointer) __atomic_exchange_n ((atomic), (newval), __ATOMIC_SEQ_CST); \
278 #define g_atomic_pointer_add(atomic, val) /* returns the value before the add, as gintptr */ \
279 (G_GNUC_EXTENSION ({ \
280 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
281 (void) (0 ? (gpointer) *(atomic) : NULL); \
282 (void) (0 ? (val) ^ (val) : 1); \
283 (gintptr) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST); \
285 #define g_atomic_pointer_and(atomic, val) /* bitwise op via a guintptr alias; returns old value */ \
286 (G_GNUC_EXTENSION ({ \
287 guintptr *gapa_atomic = (guintptr *) (atomic); \
288 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
289 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (guintptr)); \
290 (void) (0 ? (gpointer) *(atomic) : NULL); \
291 (void) (0 ? (val) ^ (val) : 1); \
292 (guintptr) __atomic_fetch_and (gapa_atomic, (val), __ATOMIC_SEQ_CST); \
294 #define g_atomic_pointer_or(atomic, val) /* bitwise op via a guintptr alias; returns old value */ \
295 (G_GNUC_EXTENSION ({ \
296 guintptr *gapo_atomic = (guintptr *) (atomic); \
297 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
298 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (guintptr)); \
299 (void) (0 ? (gpointer) *(atomic) : NULL); \
300 (void) (0 ? (val) ^ (val) : 1); \
301 (guintptr) __atomic_fetch_or (gapo_atomic, (val), __ATOMIC_SEQ_CST); \
303 #define g_atomic_pointer_xor(atomic, val) /* bitwise op via a guintptr alias; returns old value */ \
304 (G_GNUC_EXTENSION ({ \
305 guintptr *gapx_atomic = (guintptr *) (atomic); \
306 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
307 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (guintptr)); \
308 (void) (0 ? (gpointer) *(atomic) : NULL); \
309 (void) (0 ? (val) ^ (val) : 1); \
310 (guintptr) __atomic_fetch_xor (gapx_atomic, (val), __ATOMIC_SEQ_CST); \
313 #else /* defined(__ATOMIC_SEQ_CST) */
315 /* We want to achieve __ATOMIC_SEQ_CST semantics here. See
316 * https://en.cppreference.com/w/c/atomic/memory_order#Constants. For load
317 * operations, that means performing an *acquire*:
318 * > A load operation with this memory order performs the acquire operation on
319 * > the affected memory location: no reads or writes in the current thread can
320 * > be reordered before this load. All writes in other threads that release
321 * > the same atomic variable are visible in the current thread.
323 * “no reads or writes in the current thread can be reordered before this load”
324 * is implemented using a compiler barrier (a no-op `__asm__` section) to
325 * prevent instruction reordering. Writes in other threads are synchronised
326 * using `__sync_synchronize()`. It’s unclear from the GCC documentation whether
327 * `__sync_synchronize()` acts as a compiler barrier, hence our explicit use of
330 * For store operations, `__ATOMIC_SEQ_CST` means performing a *release*:
331 * > A store operation with this memory order performs the release operation:
332 * > no reads or writes in the current thread can be reordered after this store.
333 * > All writes in the current thread are visible in other threads that acquire
334 * > the same atomic variable (see Release-Acquire ordering below) and writes
335 * > that carry a dependency into the atomic variable become visible in other
336 * > threads that consume the same atomic (see Release-Consume ordering below).
338 * “no reads or writes in the current thread can be reordered after this store”
339 * is implemented using a compiler barrier to prevent instruction reordering.
340 * “All writes in the current thread are visible in other threads” is implemented
341 * using `__sync_synchronize()`; similarly for “writes that carry a dependency”.
/* Fallback implementations using the older __sync_*() intrinsics plus
 * explicit compiler barriers, per the rationale above. */
343 #define g_atomic_int_get(atomic) \
344 (G_GNUC_EXTENSION ({ \
346 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
347 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
348 gaig_result = (gint) *(atomic); \
349 __sync_synchronize (); \
350 __asm__ __volatile__ ("" : : : "memory"); \
353 #define g_atomic_int_set(atomic, newval) \
354 (G_GNUC_EXTENSION ({ \
355 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
356 (void) (0 ? *(atomic) ^ (newval) : 1); \
357 __sync_synchronize (); \
358 __asm__ __volatile__ ("" : : : "memory"); \
359 *(atomic) = (newval); \
361 #define g_atomic_pointer_get(atomic) \
362 (G_GNUC_EXTENSION ({ \
363 gpointer gapg_result; \
364 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
365 gapg_result = (gpointer) *(atomic); \
366 __sync_synchronize (); \
367 __asm__ __volatile__ ("" : : : "memory"); \
370 #if defined(glib_typeof)
/* With glib_typeof, newval is cast back to the pointee type so the store
 * keeps the caller's pointer type. */
371 #define g_atomic_pointer_set(atomic, newval) \
372 (G_GNUC_EXTENSION ({ \
373 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
374 (void) (0 ? (gpointer) *(atomic) : NULL); \
375 __sync_synchronize (); \
376 __asm__ __volatile__ ("" : : : "memory"); \
377 *(atomic) = (glib_typeof (*(atomic))) (guintptr) (newval); \
379 #else /* if !(defined(glib_typeof) */
380 #define g_atomic_pointer_set(atomic, newval) \
381 (G_GNUC_EXTENSION ({ \
382 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
383 (void) (0 ? (gpointer) *(atomic) : NULL); \
384 __sync_synchronize (); \
385 __asm__ __volatile__ ("" : : : "memory"); \
386 *(atomic) = (gpointer) (guintptr) (newval); \
388 #endif /* if defined(glib_typeof) */
/* __sync-based increment, decrement-and-test and compare-and-exchange.
 * The __sync_* builtins are full barriers, so no extra fencing is added. */
390 #define g_atomic_int_inc(atomic) \
391 (G_GNUC_EXTENSION ({ \
392 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
393 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
394 (void) __sync_fetch_and_add ((atomic), 1); \
396 #define g_atomic_int_dec_and_test(atomic) \
397 (G_GNUC_EXTENSION ({ \
398 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
399 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
400 __sync_fetch_and_sub ((atomic), 1) == 1; \
402 #define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
403 (G_GNUC_EXTENSION ({ \
404 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
405 (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
406 __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
408 #define g_atomic_int_compare_and_exchange_full(atomic, oldval, newval, preval) /* *preval receives the observed value */ \
409 (G_GNUC_EXTENSION ({ \
410 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
411 G_STATIC_ASSERT (sizeof *(preval) == sizeof (gint)); \
412 (void) (0 ? *(atomic) ^ (newval) ^ (oldval) ^ *(preval) : 1); \
413 *(preval) = __sync_val_compare_and_swap ((atomic), (oldval), (newval)); \
414 (*(preval) == (oldval)) ? TRUE : FALSE; \
416 #if defined(_GLIB_GCC_HAVE_SYNC_SWAP)
/* Use __sync_swap() directly when the compiler provides it... */
417 #define g_atomic_int_exchange(atomic, newval) \
418 (G_GNUC_EXTENSION ({ \
419 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
420 (void) (0 ? *(atomic) ^ (newval) : 1); \
421 (gint) __sync_swap ((atomic), (newval)); \
423 #else /* defined(_GLIB_GCC_HAVE_SYNC_SWAP) */
424 #define g_atomic_int_exchange(atomic, newval) \
425 (G_GNUC_EXTENSION ({ \
427 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
428 (void) (0 ? *(atomic) ^ (newval) : 1); \
432 } while (!__sync_bool_compare_and_swap (atomic, oldval, newval)); \
435 #endif /* defined(_GLIB_GCC_HAVE_SYNC_SWAP) */
/* ...otherwise exchange is emulated with the compare-and-swap loop above.
 * Each fetch-and-* macro below returns the value before the operation. */
436 #define g_atomic_int_add(atomic, val) \
437 (G_GNUC_EXTENSION ({ \
438 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
439 (void) (0 ? *(atomic) ^ (val) : 1); \
440 (gint) __sync_fetch_and_add ((atomic), (val)); \
442 #define g_atomic_int_and(atomic, val) \
443 (G_GNUC_EXTENSION ({ \
444 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
445 (void) (0 ? *(atomic) ^ (val) : 1); \
446 (guint) __sync_fetch_and_and ((atomic), (val)); \
448 #define g_atomic_int_or(atomic, val) \
449 (G_GNUC_EXTENSION ({ \
450 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
451 (void) (0 ? *(atomic) ^ (val) : 1); \
452 (guint) __sync_fetch_and_or ((atomic), (val)); \
454 #define g_atomic_int_xor(atomic, val) \
455 (G_GNUC_EXTENSION ({ \
456 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
457 (void) (0 ? *(atomic) ^ (val) : 1); \
458 (guint) __sync_fetch_and_xor ((atomic), (val)); \
461 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
462 (G_GNUC_EXTENSION ({ \
463 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
464 (void) (0 ? (gpointer) *(atomic) : NULL); \
465 __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
467 #define g_atomic_pointer_compare_and_exchange_full(atomic, oldval, newval, preval) /* *preval receives the observed value */ \
468 (G_GNUC_EXTENSION ({ \
469 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
470 G_STATIC_ASSERT (sizeof *(preval) == sizeof (gpointer)); \
471 (void) (0 ? (gpointer) *(atomic) : NULL); \
472 (void) (0 ? (gpointer) *(preval) : NULL); \
473 *(preval) = __sync_val_compare_and_swap ((atomic), (oldval), (newval)); \
474 (*(preval) == (oldval)) ? TRUE : FALSE; \
476 #if defined(_GLIB_GCC_HAVE_SYNC_SWAP)
/* Prefer __sync_swap() when available; otherwise emulate the exchange with
 * the compare-and-swap loop in the #else branch. */
477 #define g_atomic_pointer_exchange(atomic, newval) \
478 (G_GNUC_EXTENSION ({ \
479 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
480 (void) (0 ? (gpointer) *(atomic) : NULL); \
481 (gpointer) __sync_swap ((atomic), (newval)); \
484 #define g_atomic_pointer_exchange(atomic, newval) \
485 (G_GNUC_EXTENSION ({ \
487 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
488 (void) (0 ? (gpointer) *(atomic) : NULL); \
491 oldval = (gpointer) *atomic; \
492 } while (!__sync_bool_compare_and_swap (atomic, oldval, newval)); \
495 #endif /* defined(_GLIB_GCC_HAVE_SYNC_SWAP) */
/* __sync-based pointer add/and/or/xor; each __sync_fetch_and_* returns the
 * value that was stored before the operation. */
496 #define g_atomic_pointer_add(atomic, val) \
497 (G_GNUC_EXTENSION ({ \
498 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
499 (void) (0 ? (gpointer) *(atomic) : NULL); \
500 (void) (0 ? (val) ^ (val) : 1); \
501 (gintptr) __sync_fetch_and_add ((atomic), (val)); \
503 #define g_atomic_pointer_and(atomic, val) \
504 (G_GNUC_EXTENSION ({ \
505 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
506 (void) (0 ? (gpointer) *(atomic) : NULL); \
507 (void) (0 ? (val) ^ (val) : 1); \
508 (guintptr) __sync_fetch_and_and ((atomic), (val)); \
510 #define g_atomic_pointer_or(atomic, val) \
511 (G_GNUC_EXTENSION ({ \
512 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
513 (void) (0 ? (gpointer) *(atomic) : NULL); \
514 (void) (0 ? (val) ^ (val) : 1); \
515 (guintptr) __sync_fetch_and_or ((atomic), (val)); \
517 #define g_atomic_pointer_xor(atomic, val) \
518 (G_GNUC_EXTENSION ({ \
519 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
520 (void) (0 ? (gpointer) *(atomic) : NULL); \
521 (void) (0 ? (val) ^ (val) : 1); \
522 (guintptr) __sync_fetch_and_xor ((atomic), (val)); \
525 #endif /* !defined(__ATOMIC_SEQ_CST) */
527 #else /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */
/* No native lock-free atomics: each macro forwards to the out-of-line
 * function declared above, casting the argument to the declared parameter
 * type. NOTE(review): presumably implemented with a lock in gatomic.c —
 * the implementation is not visible here. */
529 #define g_atomic_int_get(atomic) \
530 (g_atomic_int_get ((gint *) (atomic)))
531 #define g_atomic_int_set(atomic, newval) \
532 (g_atomic_int_set ((gint *) (atomic), (gint) (newval)))
533 #define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
534 (g_atomic_int_compare_and_exchange ((gint *) (atomic), (oldval), (newval)))
535 #define g_atomic_int_compare_and_exchange_full(atomic, oldval, newval, preval) \
536 (g_atomic_int_compare_and_exchange_full ((gint *) (atomic), (oldval), (newval), (gint *) (preval)))
537 #define g_atomic_int_exchange(atomic, newval) \
538 (g_atomic_int_exchange ((gint *) (atomic), (newval)))
539 #define g_atomic_int_add(atomic, val) \
540 (g_atomic_int_add ((gint *) (atomic), (val)))
541 #define g_atomic_int_and(atomic, val) \
542 (g_atomic_int_and ((guint *) (atomic), (val)))
543 #define g_atomic_int_or(atomic, val) \
544 (g_atomic_int_or ((guint *) (atomic), (val)))
545 #define g_atomic_int_xor(atomic, val) \
546 (g_atomic_int_xor ((guint *) (atomic), (val)))
547 #define g_atomic_int_inc(atomic) \
548 (g_atomic_int_inc ((gint *) (atomic)))
549 #define g_atomic_int_dec_and_test(atomic) \
550 (g_atomic_int_dec_and_test ((gint *) (atomic)))
/* Pointer variants of the function-call forwarding macros. The parenthesised
 * (g_atomic_pointer_get) form suppresses recursive macro expansion. */
552 #if defined(glib_typeof)
553 /* The (void *) cast in the middle *looks* redundant, because
554 * g_atomic_pointer_get returns void * already, but it's to silence
555 * -Werror=bad-function-cast when we're doing something like:
556 * guintptr a, b; ...; a = g_atomic_pointer_get (&b);
557 * which would otherwise be assigning the void * result of
558 * g_atomic_pointer_get directly to the pointer-sized but
559 * non-pointer-typed result. */
560 #define g_atomic_pointer_get(atomic) \
561 (glib_typeof (*(atomic))) (void *) ((g_atomic_pointer_get) ((void *) atomic))
562 #else /* !(defined(glib_typeof) */
563 #define g_atomic_pointer_get(atomic) \
564 (g_atomic_pointer_get (atomic))
/* NOTE(review): the #endif matching the "#if defined(glib_typeof)" above
 * appears to be missing from this extract — verify against the full header. */
567 #define g_atomic_pointer_set(atomic, newval) \
568 (g_atomic_pointer_set ((atomic), (gpointer) (newval)))
570 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
571 (g_atomic_pointer_compare_and_exchange ((atomic), (gpointer) (oldval), (gpointer) (newval)))
572 #define g_atomic_pointer_compare_and_exchange_full(atomic, oldval, newval, prevval) \
573 (g_atomic_pointer_compare_and_exchange_full ((atomic), (gpointer) (oldval), (gpointer) (newval), (prevval)))
574 #define g_atomic_pointer_exchange(atomic, newval) \
575 (g_atomic_pointer_exchange ((atomic), (gpointer) (newval)))
576 #define g_atomic_pointer_add(atomic, val) \
577 (g_atomic_pointer_add ((atomic), (gssize) (val)))
578 #define g_atomic_pointer_and(atomic, val) \
579 (g_atomic_pointer_and ((atomic), (gsize) (val)))
580 #define g_atomic_pointer_or(atomic, val) \
581 (g_atomic_pointer_or ((atomic), (gsize) (val)))
582 #define g_atomic_pointer_xor(atomic, val) \
583 (g_atomic_pointer_xor ((atomic), (gsize) (val)))
585 #endif /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */
587 #endif /* __G_ATOMIC_H__ */