/*
 * Copyright © 2011 Ryan Lortie
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 *
 * Author: Ryan Lortie <desrt@desrt.ca>
 */
20 #ifndef __G_ATOMIC_H__
21 #define __G_ATOMIC_H__
23 #if !defined (__GLIB_H_INSIDE__) && !defined (GLIB_COMPILATION)
24 #error "Only <glib.h> can be included directly."
27 #include <glib/gtypes.h>
/* Out-of-line declarations of the GLib atomic operations.  When the
 * compiler provides lock-free atomic builtins (see the macro sections
 * below) these names are shadowed by macros; the functions remain as
 * the fallback implementation and for ABI stability.
 *
 * NOTE(review): this extraction is missing several continuation lines
 * (the second/third parameters of some declarations, and the
 * GLIB_AVAILABLE_IN_ALL annotations) — consult the complete header
 * before editing these prototypes.
 */

/* gint/guint atomic operations. */
32 gint g_atomic_int_get (const volatile gint *atomic);
34 void g_atomic_int_set (volatile gint *atomic,
37 void g_atomic_int_inc (volatile gint *atomic);
39 gboolean g_atomic_int_dec_and_test (volatile gint *atomic);
41 gboolean g_atomic_int_compare_and_exchange (volatile gint *atomic,
45 gint g_atomic_int_add (volatile gint *atomic,
47 GLIB_AVAILABLE_IN_2_30
48 guint g_atomic_int_and (volatile guint *atomic,
50 GLIB_AVAILABLE_IN_2_30
51 guint g_atomic_int_or (volatile guint *atomic,
54 guint g_atomic_int_xor (volatile guint *atomic,

/* Pointer-sized atomic operations; 'atomic' is taken as
 * (const) volatile void * so that any pointer type is accepted. */
58 gpointer g_atomic_pointer_get (const volatile void *atomic);
60 void g_atomic_pointer_set (volatile void *atomic,
63 gboolean g_atomic_pointer_compare_and_exchange (volatile void *atomic,
67 gssize g_atomic_pointer_add (volatile void *atomic,
69 GLIB_AVAILABLE_IN_2_30
70 gsize g_atomic_pointer_and (volatile void *atomic,
72 GLIB_AVAILABLE_IN_2_30
73 gsize g_atomic_pointer_or (volatile void *atomic,
76 gsize g_atomic_pointer_xor (volatile void *atomic,

/* Deprecated since 2.30; g_atomic_int_add() now returns the prior
 * value, making this redundant. */
79 GLIB_DEPRECATED_IN_2_30_FOR(g_atomic_int_add)
80 gint g_atomic_int_exchange_and_add (volatile gint *atomic,
/* Lock-free macro implementations, chosen at compile time.
 *
 * NOTE(review): in this extraction the macro bodies are incomplete —
 * the temp-variable declarations (e.g. 'gint gaig_temp;') and the
 * closing '}))' of each statement expression are missing, as are some
 * blank separator lines.  Do not rebuild these bodies from this view
 * alone; consult the complete header.
 *
 * Common idioms used throughout:
 *   - G_STATIC_ASSERT(sizeof *(atomic) == ...) rejects wrongly-sized
 *     operands at compile time.
 *   - '(void) (0 ? *(atomic) ^ ... : 1)' is a never-evaluated
 *     expression that forces a type check of the arguments.
 */
85 #if defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)
87 /* We prefer the new C11-style atomic extension of GCC if available */
88 #if defined(__ATOMIC_SEQ_CST)
/* gint get/set via __atomic_load/__atomic_store with seq-cst ordering. */
90 #define g_atomic_int_get(atomic) \
91 (G_GNUC_EXTENSION ({ \
92 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
94 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
95 __atomic_load ((gint *)(atomic), &gaig_temp, __ATOMIC_SEQ_CST); \
98 #define g_atomic_int_set(atomic, newval) \
99 (G_GNUC_EXTENSION ({ \
100 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
101 gint gais_temp = (gint) (newval); \
102 (void) (0 ? *(atomic) ^ (newval) : 1); \
103 __atomic_store ((gint *)(atomic), &gais_temp, __ATOMIC_SEQ_CST); \
106 #if defined(glib_typeof)
/* With glib_typeof available, pointer get/set preserve the operand's
 * exact pointer type instead of going through gpointer. */
107 #define g_atomic_pointer_get(atomic) \
108 (G_GNUC_EXTENSION ({ \
109 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
110 glib_typeof (*(atomic)) gapg_temp_newval; \
111 glib_typeof ((atomic)) gapg_temp_atomic = (atomic); \
112 __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \
115 #define g_atomic_pointer_set(atomic, newval) \
116 (G_GNUC_EXTENSION ({ \
117 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
118 glib_typeof ((atomic)) gaps_temp_atomic = (atomic); \
119 glib_typeof (*(atomic)) gaps_temp_newval = (newval); \
120 (void) (0 ? (gpointer) * (atomic) : NULL); \
121 __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST); \
123 #else /* if !defined(glib_typeof) */
/* Without glib_typeof, fall back to casting through gpointer *. */
124 #define g_atomic_pointer_get(atomic) \
125 (G_GNUC_EXTENSION ({ \
126 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
127 gpointer gapg_temp_newval; \
128 gpointer *gapg_temp_atomic = (gpointer *)(atomic); \
129 __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \
132 #define g_atomic_pointer_set(atomic, newval) \
133 (G_GNUC_EXTENSION ({ \
134 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
135 gpointer *gaps_temp_atomic = (gpointer *)(atomic); \
136 gpointer gaps_temp_newval = (gpointer)(newval); \
137 (void) (0 ? (gpointer) *(atomic) : NULL); \
138 __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST); \
140 #endif /* !defined(glib_typeof) */
/* gint read-modify-write operations: each wraps the corresponding
 * __atomic_fetch_* builtin; the arithmetic ones return the PRIOR
 * value (fetch-then-op), dec_and_test returns TRUE when the value
 * reached zero. */
142 #define g_atomic_int_inc(atomic) \
143 (G_GNUC_EXTENSION ({ \
144 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
145 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
146 (void) __atomic_fetch_add ((atomic), 1, __ATOMIC_SEQ_CST); \
148 #define g_atomic_int_dec_and_test(atomic) \
149 (G_GNUC_EXTENSION ({ \
150 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
151 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
152 __atomic_fetch_sub ((atomic), 1, __ATOMIC_SEQ_CST) == 1; \
154 #define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
155 (G_GNUC_EXTENSION ({ \
156 gint gaicae_oldval = (oldval); \
157 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
158 (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
159 __atomic_compare_exchange_n ((atomic), &gaicae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
161 #define g_atomic_int_add(atomic, val) \
162 (G_GNUC_EXTENSION ({ \
163 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
164 (void) (0 ? *(atomic) ^ (val) : 1); \
165 (gint) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST); \
167 #define g_atomic_int_and(atomic, val) \
168 (G_GNUC_EXTENSION ({ \
169 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
170 (void) (0 ? *(atomic) ^ (val) : 1); \
171 (guint) __atomic_fetch_and ((atomic), (val), __ATOMIC_SEQ_CST); \
173 #define g_atomic_int_or(atomic, val) \
174 (G_GNUC_EXTENSION ({ \
175 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
176 (void) (0 ? *(atomic) ^ (val) : 1); \
177 (guint) __atomic_fetch_or ((atomic), (val), __ATOMIC_SEQ_CST); \
179 #define g_atomic_int_xor(atomic, val) \
180 (G_GNUC_EXTENSION ({ \
181 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
182 (void) (0 ? *(atomic) ^ (val) : 1); \
183 (guint) __atomic_fetch_xor ((atomic), (val), __ATOMIC_SEQ_CST); \
186 #if defined(glib_typeof)
/* Typed pointer compare-and-exchange: oldval keeps its declared type. */
187 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
188 (G_GNUC_EXTENSION ({ \
189 G_STATIC_ASSERT (sizeof (oldval) == sizeof (gpointer)); \
190 glib_typeof ((oldval)) gapcae_oldval = (oldval); \
191 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
192 (void) (0 ? (gpointer) *(atomic) : NULL); \
193 __atomic_compare_exchange_n ((atomic), &gapcae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
195 #else /* if !defined(glib_typeof) */
/* Untyped variant: oldval is cast to gpointer. */
196 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
197 (G_GNUC_EXTENSION ({ \
198 G_STATIC_ASSERT (sizeof (oldval) == sizeof (gpointer)); \
199 gpointer gapcae_oldval = (gpointer)(oldval); \
200 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
201 (void) (0 ? (gpointer) *(atomic) : NULL); \
202 __atomic_compare_exchange_n ((atomic), &gapcae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
204 #endif /* defined(glib_typeof) */
/* Pointer arithmetic/bitwise ops.  and/or/xor reinterpret the slot as
 * a gsize (both sizes are statically asserted equal) and return the
 * PRIOR value. */
205 #define g_atomic_pointer_add(atomic, val) \
206 (G_GNUC_EXTENSION ({ \
207 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
208 (void) (0 ? (gpointer) *(atomic) : NULL); \
209 (void) (0 ? (val) ^ (val) : 1); \
210 (gssize) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST); \
212 #define g_atomic_pointer_and(atomic, val) \
213 (G_GNUC_EXTENSION ({ \
214 volatile gsize *gapa_atomic = (volatile gsize *) (atomic); \
215 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
216 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize)); \
217 (void) (0 ? (gpointer) *(atomic) : NULL); \
218 (void) (0 ? (val) ^ (val) : 1); \
219 (gsize) __atomic_fetch_and (gapa_atomic, (val), __ATOMIC_SEQ_CST); \
221 #define g_atomic_pointer_or(atomic, val) \
222 (G_GNUC_EXTENSION ({ \
223 volatile gsize *gapo_atomic = (volatile gsize *) (atomic); \
224 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
225 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize)); \
226 (void) (0 ? (gpointer) *(atomic) : NULL); \
227 (void) (0 ? (val) ^ (val) : 1); \
228 (gsize) __atomic_fetch_or (gapo_atomic, (val), __ATOMIC_SEQ_CST); \
230 #define g_atomic_pointer_xor(atomic, val) \
231 (G_GNUC_EXTENSION ({ \
232 volatile gsize *gapx_atomic = (volatile gsize *) (atomic); \
233 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
234 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize)); \
235 (void) (0 ? (gpointer) *(atomic) : NULL); \
236 (void) (0 ? (val) ^ (val) : 1); \
237 (gsize) __atomic_fetch_xor (gapx_atomic, (val), __ATOMIC_SEQ_CST); \
/* Fallback for GCC without __ATOMIC_SEQ_CST: legacy __sync builtins,  \
   with explicit __sync_synchronize() full barriers plus no-op asm     \
   compiler barriers for the plain load/store macros.  NOTE(review):   \
   the '}))' closers and some temp declarations are missing from this  \
   extraction, and the explanatory comment below has lost its closing  \
   delimiter — consult the complete header before editing. */          \
240 #else /* defined(__ATOMIC_SEQ_CST) */
242 /* We want to achieve __ATOMIC_SEQ_CST semantics here. See
243 * https://en.cppreference.com/w/c/atomic/memory_order#Constants. For load
244 * operations, that means performing an *acquire*:
245 * > A load operation with this memory order performs the acquire operation on
246 * > the affected memory location: no reads or writes in the current thread can
247 * > be reordered before this load. All writes in other threads that release
248 * > the same atomic variable are visible in the current thread.
250 * “no reads or writes in the current thread can be reordered before this load”
251 * is implemented using a compiler barrier (a no-op `__asm__` section) to
252 * prevent instruction reordering. Writes in other threads are synchronised
253 * using `__sync_synchronize()`. It’s unclear from the GCC documentation whether
254 * `__sync_synchronize()` acts as a compiler barrier, hence our explicit use of
257 * For store operations, `__ATOMIC_SEQ_CST` means performing a *release*:
258 * > A store operation with this memory order performs the release operation:
259 * > no reads or writes in the current thread can be reordered after this store.
260 * > All writes in the current thread are visible in other threads that acquire
261 * > the same atomic variable (see Release-Acquire ordering below) and writes
262 * > that carry a dependency into the atomic variable become visible in other
263 * > threads that consume the same atomic (see Release-Consume ordering below).
265 * “no reads or writes in the current thread can be reordered after this store”
266 * is implemented using a compiler barrier to prevent instruction reordering.
267 * “All writes in the current thread are visible in other threads” is implemented
268 * using `__sync_synchronize()`; similarly for “writes that carry a dependency”.
270 #define g_atomic_int_get(atomic) \
271 (G_GNUC_EXTENSION ({ \
273 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
274 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
275 gaig_result = (gint) *(atomic); \
276 __sync_synchronize (); \
277 __asm__ __volatile__ ("" : : : "memory"); \
280 #define g_atomic_int_set(atomic, newval) \
281 (G_GNUC_EXTENSION ({ \
282 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
283 (void) (0 ? *(atomic) ^ (newval) : 1); \
284 __sync_synchronize (); \
285 __asm__ __volatile__ ("" : : : "memory"); \
286 *(atomic) = (newval); \
288 #define g_atomic_pointer_get(atomic) \
289 (G_GNUC_EXTENSION ({ \
290 gpointer gapg_result; \
291 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
292 gapg_result = (gpointer) *(atomic); \
293 __sync_synchronize (); \
294 __asm__ __volatile__ ("" : : : "memory"); \
297 #if defined(glib_typeof)
298 #define g_atomic_pointer_set(atomic, newval) \
299 (G_GNUC_EXTENSION ({ \
300 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
301 (void) (0 ? (gpointer) *(atomic) : NULL); \
302 __sync_synchronize (); \
303 __asm__ __volatile__ ("" : : : "memory"); \
304 *(atomic) = (glib_typeof (*(atomic))) (gsize) (newval); \
306 #else /* if !defined(glib_typeof) */
/* Untyped pointer_set: the (gsize) round-trip silences pointer/int
 * conversion warnings for integer-valued 'newval'. */
307 #define g_atomic_pointer_set(atomic, newval) \
308 (G_GNUC_EXTENSION ({ \
309 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
310 (void) (0 ? (gpointer) *(atomic) : NULL); \
311 __sync_synchronize (); \
312 __asm__ __volatile__ ("" : : : "memory"); \
313 *(atomic) = (gpointer) (gsize) (newval); \
315 #endif /* defined(glib_typeof) */
/* Read-modify-write operations via the __sync builtins, which are
 * documented by GCC as full barriers.  The fetch-and-* forms return
 * the PRIOR value; dec_and_test is TRUE when the value hit zero. */
317 #define g_atomic_int_inc(atomic) \
318 (G_GNUC_EXTENSION ({ \
319 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
320 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
321 (void) __sync_fetch_and_add ((atomic), 1); \
323 #define g_atomic_int_dec_and_test(atomic) \
324 (G_GNUC_EXTENSION ({ \
325 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
326 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
327 __sync_fetch_and_sub ((atomic), 1) == 1; \
329 #define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
330 (G_GNUC_EXTENSION ({ \
331 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
332 (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
333 __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
335 #define g_atomic_int_add(atomic, val) \
336 (G_GNUC_EXTENSION ({ \
337 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
338 (void) (0 ? *(atomic) ^ (val) : 1); \
339 (gint) __sync_fetch_and_add ((atomic), (val)); \
341 #define g_atomic_int_and(atomic, val) \
342 (G_GNUC_EXTENSION ({ \
343 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
344 (void) (0 ? *(atomic) ^ (val) : 1); \
345 (guint) __sync_fetch_and_and ((atomic), (val)); \
347 #define g_atomic_int_or(atomic, val) \
348 (G_GNUC_EXTENSION ({ \
349 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
350 (void) (0 ? *(atomic) ^ (val) : 1); \
351 (guint) __sync_fetch_and_or ((atomic), (val)); \
353 #define g_atomic_int_xor(atomic, val) \
354 (G_GNUC_EXTENSION ({ \
355 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
356 (void) (0 ? *(atomic) ^ (val) : 1); \
357 (guint) __sync_fetch_and_xor ((atomic), (val)); \
360 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
361 (G_GNUC_EXTENSION ({ \
362 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
363 (void) (0 ? (gpointer) *(atomic) : NULL); \
364 __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
366 #define g_atomic_pointer_add(atomic, val) \
367 (G_GNUC_EXTENSION ({ \
368 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
369 (void) (0 ? (gpointer) *(atomic) : NULL); \
370 (void) (0 ? (val) ^ (val) : 1); \
371 (gssize) __sync_fetch_and_add ((atomic), (val)); \
373 #define g_atomic_pointer_and(atomic, val) \
374 (G_GNUC_EXTENSION ({ \
375 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
376 (void) (0 ? (gpointer) *(atomic) : NULL); \
377 (void) (0 ? (val) ^ (val) : 1); \
378 (gsize) __sync_fetch_and_and ((atomic), (val)); \
380 #define g_atomic_pointer_or(atomic, val) \
381 (G_GNUC_EXTENSION ({ \
382 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
383 (void) (0 ? (gpointer) *(atomic) : NULL); \
384 (void) (0 ? (val) ^ (val) : 1); \
385 (gsize) __sync_fetch_and_or ((atomic), (val)); \
387 #define g_atomic_pointer_xor(atomic, val) \
388 (G_GNUC_EXTENSION ({ \
389 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
390 (void) (0 ? (gpointer) *(atomic) : NULL); \
391 (void) (0 ? (val) ^ (val) : 1); \
392 (gsize) __sync_fetch_and_xor ((atomic), (val)); \
395 #endif /* !defined(__ATOMIC_SEQ_CST) */
/* No lock-free builtins available: each macro simply trampolines to
 * the out-of-line function declared at the top of this header, adding
 * the casts so callers may pass any appropriately-sized operand. */
397 #else /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */
/* Integer operations: operand cast to gint* / guint* as appropriate. */
399 #define g_atomic_int_get(atomic) \
400 (g_atomic_int_get ((gint *) (atomic)))
401 #define g_atomic_int_set(atomic, newval) \
402 (g_atomic_int_set ((gint *) (atomic), (gint) (newval)))
403 #define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
404 (g_atomic_int_compare_and_exchange ((gint *) (atomic), (oldval), (newval)))
405 #define g_atomic_int_add(atomic, val) \
406 (g_atomic_int_add ((gint *) (atomic), (val)))
407 #define g_atomic_int_and(atomic, val) \
408 (g_atomic_int_and ((guint *) (atomic), (val)))
409 #define g_atomic_int_or(atomic, val) \
410 (g_atomic_int_or ((guint *) (atomic), (val)))
411 #define g_atomic_int_xor(atomic, val) \
412 (g_atomic_int_xor ((guint *) (atomic), (val)))
413 #define g_atomic_int_inc(atomic) \
414 (g_atomic_int_inc ((gint *) (atomic)))
415 #define g_atomic_int_dec_and_test(atomic) \
416 (g_atomic_int_dec_and_test ((gint *) (atomic)))
/* Pointer operations: the functions take volatile void *, so the
 * operand needs no cast; values are cast to gpointer/gsize/gssize. */
418 #define g_atomic_pointer_get(atomic) \
419 (g_atomic_pointer_get (atomic))
420 #define g_atomic_pointer_set(atomic, newval) \
421 (g_atomic_pointer_set ((atomic), (gpointer) (newval)))
422 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
423 (g_atomic_pointer_compare_and_exchange ((atomic), (gpointer) (oldval), (gpointer) (newval)))
424 #define g_atomic_pointer_add(atomic, val) \
425 (g_atomic_pointer_add ((atomic), (gssize) (val)))
426 #define g_atomic_pointer_and(atomic, val) \
427 (g_atomic_pointer_and ((atomic), (gsize) (val)))
428 #define g_atomic_pointer_or(atomic, val) \
429 (g_atomic_pointer_or ((atomic), (gsize) (val)))
430 #define g_atomic_pointer_xor(atomic, val) \
431 (g_atomic_pointer_xor ((atomic), (gsize) (val)))
433 #endif /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */
435 #endif /* __G_ATOMIC_H__ */