2 * Copyright © 2011 Ryan Lortie
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License as published by the Free Software Foundation; either
7 * version 2.1 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful, but
10 * WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Lesser General Public License for more details.
14 * You should have received a copy of the GNU Lesser General Public
15 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
17 * Author: Ryan Lortie <desrt@desrt.ca>
20 #ifndef __G_ATOMIC_H__
21 #define __G_ATOMIC_H__
23 #if !defined (__GLIB_H_INSIDE__) && !defined (GLIB_COMPILATION)
24 #error "Only <glib.h> can be included directly."
27 #include <glib/gtypes.h>
/* Public out-of-line prototypes for the atomic operations.  The macro
 * versions later in this header shadow these names; the functions remain
 * for ABI compatibility and for the non-lock-free fallback.
 * NOTE(review): several prototypes below are missing their trailing
 * parameter lines and most are missing GLIB_AVAILABLE_IN_ALL annotations
 * in this copy — TODO confirm against upstream gatomic.h. */
/* Atomically reads *atomic (macro version uses SEQ_CST ordering). */
32 gint g_atomic_int_get (const volatile gint *atomic);
/* Atomically stores a new value into *atomic (second parameter line
 * appears truncated in this copy). */
34 void g_atomic_int_set (volatile gint *atomic,
/* Atomically increments *atomic by 1. */
37 void g_atomic_int_inc (volatile gint *atomic);
/* Atomically decrements *atomic by 1; TRUE iff the result is 0. */
39 gboolean g_atomic_int_dec_and_test (volatile gint *atomic);
/* Compare-and-swap on a gint (oldval/newval parameter lines truncated). */
41 gboolean g_atomic_int_compare_and_exchange (volatile gint *atomic,
/* Atomic fetch-and-add; per the macro implementation below, returns the
 * value *atomic held before the addition. */
45 gint g_atomic_int_add (volatile gint *atomic,
/* Atomic bitwise AND/OR/XOR on a guint; each returns the prior value.
 * Available since GLib 2.30. */
47 GLIB_AVAILABLE_IN_2_30
48 guint g_atomic_int_and (volatile guint *atomic,
50 GLIB_AVAILABLE_IN_2_30
51 guint g_atomic_int_or (volatile guint *atomic,
54 guint g_atomic_int_xor (volatile guint *atomic,
/* Pointer-sized counterparts of the integer operations above.  They take
 * `volatile void *` so any pointer-to-pointer may be passed without casts. */
58 gpointer g_atomic_pointer_get (const volatile void *atomic);
60 void g_atomic_pointer_set (volatile void *atomic,
63 gboolean g_atomic_pointer_compare_and_exchange (volatile void *atomic,
67 gssize g_atomic_pointer_add (volatile void *atomic,
69 GLIB_AVAILABLE_IN_2_30
70 gsize g_atomic_pointer_and (volatile void *atomic,
72 GLIB_AVAILABLE_IN_2_30
73 gsize g_atomic_pointer_or (volatile void *atomic,
76 gsize g_atomic_pointer_xor (volatile void *atomic,
/* Deprecated since 2.30: g_atomic_int_add() now returns the old value,
 * making this separate entry point redundant. */
79 GLIB_DEPRECATED_IN_2_30_FOR(g_atomic_int_add)
80 gint g_atomic_int_exchange_and_add (volatile gint *atomic,
85 #if defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)
87 /* We prefer the new C11-style atomic extension of GCC if available */
88 #if defined(__ATOMIC_SEQ_CST)
/* Macro form of g_atomic_int_get(): a sequentially-consistent load via
 * __atomic_load().  The `(void) (0 ? *(atomic) ^ *(atomic) : 1)` line is an
 * unevaluated compile-time check that *atomic is an integral type.
 * NOTE(review): the `gint gaig_temp;` declaration and the closing
 * `gaig_temp; }))` lines of this statement expression appear to be missing
 * from this copy — TODO confirm against upstream gatomic.h. */
90 #define g_atomic_int_get(atomic) \
91 (G_GNUC_EXTENSION ({ \
92 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
94 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
95 __atomic_load ((gint *)(atomic), &gaig_temp, __ATOMIC_SEQ_CST); \
/* Macro form of g_atomic_int_set(): sequentially-consistent store; the
 * value goes through a temporary because __atomic_store() takes a pointer. */ \
98 #define g_atomic_int_set(atomic, newval) \
99 (G_GNUC_EXTENSION ({ \
100 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
101 gint gais_temp = (gint) (newval); \
102 (void) (0 ? *(atomic) ^ (newval) : 1); \
103 __atomic_store ((gint *)(atomic), &gais_temp, __ATOMIC_SEQ_CST); \
/* Pointer load/store macros, sequentially consistent.  With __typeof__
 * support the pointee type is preserved; otherwise everything goes through
 * plain gpointer.
 * NOTE(review): the closing `gapg_temp_newval; }))` / `}))` lines of these
 * statement expressions appear to be missing from this copy — TODO confirm
 * against upstream gatomic.h. */
106 #if defined(g_has_typeof)
/* Typed load: temporaries carry the exact pointer type of `atomic`. */
107 #define g_atomic_pointer_get(atomic) \
108 (G_GNUC_EXTENSION ({ \
109 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
110 __typeof__(*(atomic)) gapg_temp_newval; \
111 __typeof__((atomic)) gapg_temp_atomic = (atomic); \
112 __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \
/* Typed store; the `(void) (0 ? (gpointer) *(atomic) : NULL)` line is an
 * unevaluated check that *atomic is pointer-compatible. */ \
115 #define g_atomic_pointer_set(atomic, newval) \
116 (G_GNUC_EXTENSION ({ \
117 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
118 __typeof__((atomic)) gaps_temp_atomic = (atomic); \
119 __typeof__(*(atomic)) gaps_temp_newval = (newval); \
120 (void) (0 ? (gpointer) *(atomic) : NULL); \
121 __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST); \
123 #else /* if !defined(g_has_typeof) */
/* Untyped fallback: operate through gpointer, losing the pointee type. */
124 #define g_atomic_pointer_get(atomic) \
125 (G_GNUC_EXTENSION ({ \
126 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
127 gpointer gapg_temp_newval; \
128 gpointer *gapg_temp_atomic = (gpointer *)(atomic); \
129 __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \
/* Untyped sequentially-consistent store. */ \
132 #define g_atomic_pointer_set(atomic, newval) \
133 (G_GNUC_EXTENSION ({ \
134 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
135 gpointer *gaps_temp_atomic = (gpointer *)(atomic); \
136 gpointer gaps_temp_newval = (gpointer)(newval); \
137 (void) (0 ? (gpointer) *(atomic) : NULL); \
138 __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST); \
140 #endif /* !defined(g_has_typeof) */
/* Integer read-modify-write macros, all __ATOMIC_SEQ_CST.  Each
 * `(void) (0 ? *(atomic) ^ ... : 1)` line is an unevaluated compile-time
 * integral-type check on the arguments.
 * NOTE(review): the closing `}))` of each statement expression appears to
 * be missing from this copy — TODO confirm against upstream gatomic.h. */
142 #define g_atomic_int_inc(atomic) \
143 (G_GNUC_EXTENSION ({ \
144 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
145 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
146 (void) __atomic_fetch_add ((atomic), 1, __ATOMIC_SEQ_CST); \
/* TRUE iff the decrement took the value to 0 (i.e. the old value was 1). */ \
148 #define g_atomic_int_dec_and_test(atomic) \
149 (G_GNUC_EXTENSION ({ \
150 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
151 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
152 __atomic_fetch_sub ((atomic), 1, __ATOMIC_SEQ_CST) == 1; \
/* Strong CAS (weak = FALSE); result normalised to TRUE/FALSE. */ \
154 #define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
155 (G_GNUC_EXTENSION ({ \
156 gint gaicae_oldval = (oldval); \
157 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
158 (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
159 __atomic_compare_exchange_n ((atomic), &gaicae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
/* Fetch-and-add: returns the value held before the addition. */ \
161 #define g_atomic_int_add(atomic, val) \
162 (G_GNUC_EXTENSION ({ \
163 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
164 (void) (0 ? *(atomic) ^ (val) : 1); \
165 (gint) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST); \
/* Fetch-and-AND/OR/XOR: each returns the prior value as guint. */ \
167 #define g_atomic_int_and(atomic, val) \
168 (G_GNUC_EXTENSION ({ \
169 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
170 (void) (0 ? *(atomic) ^ (val) : 1); \
171 (guint) __atomic_fetch_and ((atomic), (val), __ATOMIC_SEQ_CST); \
173 #define g_atomic_int_or(atomic, val) \
174 (G_GNUC_EXTENSION ({ \
175 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
176 (void) (0 ? *(atomic) ^ (val) : 1); \
177 (guint) __atomic_fetch_or ((atomic), (val), __ATOMIC_SEQ_CST); \
179 #define g_atomic_int_xor(atomic, val) \
180 (G_GNUC_EXTENSION ({ \
181 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
182 (void) (0 ? *(atomic) ^ (val) : 1); \
183 (guint) __atomic_fetch_xor ((atomic), (val), __ATOMIC_SEQ_CST); \
/* Pointer-sized CAS and arithmetic/bitwise RMW macros, __ATOMIC_SEQ_CST.
 * The bitwise forms reinterpret the slot as a gsize, statically asserting
 * that pointers and gsize have the same width.
 * NOTE(review): the closing `}))` of each statement expression appears to
 * be missing from this copy — TODO confirm against upstream gatomic.h. */
186 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
187 (G_GNUC_EXTENSION ({ \
188 G_STATIC_ASSERT (sizeof (oldval) == sizeof (gpointer)); \
189 __typeof__ ((oldval)) gapcae_oldval = (oldval); \
190 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
191 (void) (0 ? (gpointer) *(atomic) : NULL); \
192 __atomic_compare_exchange_n ((atomic), &gapcae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
/* Fetch-and-add on a pointer-sized slot; old value returned as gssize. */ \
194 #define g_atomic_pointer_add(atomic, val) \
195 (G_GNUC_EXTENSION ({ \
196 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
197 (void) (0 ? (gpointer) *(atomic) : NULL); \
198 (void) (0 ? (val) ^ (val) : 1); \
199 (gssize) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST); \
/* Bitwise AND/OR/XOR on a pointer-sized slot via a gsize alias; each
 * returns the prior value as gsize. */ \
201 #define g_atomic_pointer_and(atomic, val) \
202 (G_GNUC_EXTENSION ({ \
203 volatile gsize *gapa_atomic = (volatile gsize *) (atomic); \
204 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
205 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize)); \
206 (void) (0 ? (gpointer) *(atomic) : NULL); \
207 (void) (0 ? (val) ^ (val) : 1); \
208 (gsize) __atomic_fetch_and (gapa_atomic, (val), __ATOMIC_SEQ_CST); \
210 #define g_atomic_pointer_or(atomic, val) \
211 (G_GNUC_EXTENSION ({ \
212 volatile gsize *gapo_atomic = (volatile gsize *) (atomic); \
213 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
214 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize)); \
215 (void) (0 ? (gpointer) *(atomic) : NULL); \
216 (void) (0 ? (val) ^ (val) : 1); \
217 (gsize) __atomic_fetch_or (gapo_atomic, (val), __ATOMIC_SEQ_CST); \
219 #define g_atomic_pointer_xor(atomic, val) \
220 (G_GNUC_EXTENSION ({ \
221 volatile gsize *gapx_atomic = (volatile gsize *) (atomic); \
222 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
223 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize)); \
224 (void) (0 ? (gpointer) *(atomic) : NULL); \
225 (void) (0 ? (val) ^ (val) : 1); \
226 (gsize) __atomic_fetch_xor (gapx_atomic, (val), __ATOMIC_SEQ_CST); \
229 #else /* defined(__ATOMIC_SEQ_CST) */
231 /* We want to achieve __ATOMIC_SEQ_CST semantics here. See
232 * https://en.cppreference.com/w/c/atomic/memory_order#Constants. For load
233 * operations, that means performing an *acquire*:
234 * > A load operation with this memory order performs the acquire operation on
235 * > the affected memory location: no reads or writes in the current thread can
236 * > be reordered before this load. All writes in other threads that release
237 * > the same atomic variable are visible in the current thread.
239 * “no reads or writes in the current thread can be reordered before this load”
240 * is implemented using a compiler barrier (a no-op `__asm__` section) to
241 * prevent instruction reordering. Writes in other threads are synchronised
242 * using `__sync_synchronize()`. It’s unclear from the GCC documentation whether
243 * `__sync_synchronize()` acts as a compiler barrier, hence our explicit use of one.
246 * For store operations, `__ATOMIC_SEQ_CST` means performing a *release*:
247 * > A store operation with this memory order performs the release operation:
248 * > no reads or writes in the current thread can be reordered after this store.
249 * > All writes in the current thread are visible in other threads that acquire
250 * > the same atomic variable (see Release-Acquire ordering below) and writes
251 * > that carry a dependency into the atomic variable become visible in other
252 * > threads that consume the same atomic (see Release-Consume ordering below).
254 * “no reads or writes in the current thread can be reordered after this store”
255 * is implemented using a compiler barrier to prevent instruction reordering.
256 * “All writes in the current thread are visible in other threads” is implemented
257 * using `__sync_synchronize()`; similarly for “writes that carry a dependency”.
 */
/* __sync_*-based fallback used when the GCC __atomic extension is not
 * available.  Loads and stores pair a full hardware barrier
 * (__sync_synchronize()) with an explicit compiler barrier (the empty
 * volatile __asm__) to approximate __ATOMIC_SEQ_CST, as described in the
 * comment block above; the RMW operations rely on the full-barrier
 * semantics of the __sync builtins themselves.
 * NOTE(review): in this copy `gaig_result` is used without a visible
 * declaration and the closing `result; }))` lines of the get/set macros
 * appear to be missing — TODO confirm against upstream gatomic.h.  The
 * explanatory comment block preceding this section also appears to lack
 * its closing comment terminator. */
259 #define g_atomic_int_get(atomic) \
260 (G_GNUC_EXTENSION ({ \
262 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
263 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
264 gaig_result = (gint) *(atomic); \
265 __sync_synchronize (); \
266 __asm__ __volatile__ ("" : : : "memory"); \
/* Store: barrier *before* the write so prior writes are visible first. */ \
269 #define g_atomic_int_set(atomic, newval) \
270 (G_GNUC_EXTENSION ({ \
271 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
272 (void) (0 ? *(atomic) ^ (newval) : 1); \
273 __sync_synchronize (); \
274 __asm__ __volatile__ ("" : : : "memory"); \
275 *(atomic) = (newval); \
277 #define g_atomic_pointer_get(atomic) \
278 (G_GNUC_EXTENSION ({ \
279 gpointer gapg_result; \
280 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
281 gapg_result = (gpointer) *(atomic); \
282 __sync_synchronize (); \
283 __asm__ __volatile__ ("" : : : "memory"); \
/* The (gsize) round-trip avoids pointer-conversion warnings on store. */ \
286 #define g_atomic_pointer_set(atomic, newval) \
287 (G_GNUC_EXTENSION ({ \
288 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
289 (void) (0 ? (gpointer) *(atomic) : NULL); \
290 __sync_synchronize (); \
291 __asm__ __volatile__ ("" : : : "memory"); \
292 *(atomic) = (__typeof__ (*(atomic))) (gsize) (newval); \
295 #define g_atomic_int_inc(atomic) \
296 (G_GNUC_EXTENSION ({ \
297 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
298 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
299 (void) __sync_fetch_and_add ((atomic), 1); \
/* TRUE iff the old value was 1, i.e. the decrement reached 0. */ \
301 #define g_atomic_int_dec_and_test(atomic) \
302 (G_GNUC_EXTENSION ({ \
303 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
304 (void) (0 ? *(atomic) ^ *(atomic) : 1); \
305 __sync_fetch_and_sub ((atomic), 1) == 1; \
307 #define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
308 (G_GNUC_EXTENSION ({ \
309 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
310 (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1); \
311 __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
/* Fetch-and-add/AND/OR/XOR: each returns the value held beforehand. */ \
313 #define g_atomic_int_add(atomic, val) \
314 (G_GNUC_EXTENSION ({ \
315 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
316 (void) (0 ? *(atomic) ^ (val) : 1); \
317 (gint) __sync_fetch_and_add ((atomic), (val)); \
319 #define g_atomic_int_and(atomic, val) \
320 (G_GNUC_EXTENSION ({ \
321 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
322 (void) (0 ? *(atomic) ^ (val) : 1); \
323 (guint) __sync_fetch_and_and ((atomic), (val)); \
325 #define g_atomic_int_or(atomic, val) \
326 (G_GNUC_EXTENSION ({ \
327 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
328 (void) (0 ? *(atomic) ^ (val) : 1); \
329 (guint) __sync_fetch_and_or ((atomic), (val)); \
331 #define g_atomic_int_xor(atomic, val) \
332 (G_GNUC_EXTENSION ({ \
333 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint)); \
334 (void) (0 ? *(atomic) ^ (val) : 1); \
335 (guint) __sync_fetch_and_xor ((atomic), (val)); \
/* Pointer-sized variants of the same __sync-based operations. */ \
338 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
339 (G_GNUC_EXTENSION ({ \
340 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
341 (void) (0 ? (gpointer) *(atomic) : NULL); \
342 __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
344 #define g_atomic_pointer_add(atomic, val) \
345 (G_GNUC_EXTENSION ({ \
346 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
347 (void) (0 ? (gpointer) *(atomic) : NULL); \
348 (void) (0 ? (val) ^ (val) : 1); \
349 (gssize) __sync_fetch_and_add ((atomic), (val)); \
351 #define g_atomic_pointer_and(atomic, val) \
352 (G_GNUC_EXTENSION ({ \
353 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
354 (void) (0 ? (gpointer) *(atomic) : NULL); \
355 (void) (0 ? (val) ^ (val) : 1); \
356 (gsize) __sync_fetch_and_and ((atomic), (val)); \
358 #define g_atomic_pointer_or(atomic, val) \
359 (G_GNUC_EXTENSION ({ \
360 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
361 (void) (0 ? (gpointer) *(atomic) : NULL); \
362 (void) (0 ? (val) ^ (val) : 1); \
363 (gsize) __sync_fetch_and_or ((atomic), (val)); \
365 #define g_atomic_pointer_xor(atomic, val) \
366 (G_GNUC_EXTENSION ({ \
367 G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer)); \
368 (void) (0 ? (gpointer) *(atomic) : NULL); \
369 (void) (0 ? (val) ^ (val) : 1); \
370 (gsize) __sync_fetch_and_xor ((atomic), (val)); \
373 #endif /* !defined(__ATOMIC_SEQ_CST) */
375 #else /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */
/* No lock-free builtins available: each macro forwards to the out-of-line
 * function declared above, casting away qualifier differences so any
 * integer/pointer lvalue may be passed.  A function-like macro is not
 * expanded recursively inside its own expansion, so the inner name calls
 * the real function rather than re-invoking the macro. */
377 #define g_atomic_int_get(atomic) \
378 (g_atomic_int_get ((gint *) (atomic)))
379 #define g_atomic_int_set(atomic, newval) \
380 (g_atomic_int_set ((gint *) (atomic), (gint) (newval)))
381 #define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
382 (g_atomic_int_compare_and_exchange ((gint *) (atomic), (oldval), (newval)))
383 #define g_atomic_int_add(atomic, val) \
384 (g_atomic_int_add ((gint *) (atomic), (val)))
/* The bitwise operations take guint*, matching the prototypes above. */
385 #define g_atomic_int_and(atomic, val) \
386 (g_atomic_int_and ((guint *) (atomic), (val)))
387 #define g_atomic_int_or(atomic, val) \
388 (g_atomic_int_or ((guint *) (atomic), (val)))
389 #define g_atomic_int_xor(atomic, val) \
390 (g_atomic_int_xor ((guint *) (atomic), (val)))
391 #define g_atomic_int_inc(atomic) \
392 (g_atomic_int_inc ((gint *) (atomic)))
393 #define g_atomic_int_dec_and_test(atomic) \
394 (g_atomic_int_dec_and_test ((gint *) (atomic)))
/* Pointer variants: the functions take `volatile void *`, so no cast is
 * needed on the atomic argument itself. */
396 #define g_atomic_pointer_get(atomic) \
397 (g_atomic_pointer_get (atomic))
398 #define g_atomic_pointer_set(atomic, newval) \
399 (g_atomic_pointer_set ((atomic), (gpointer) (newval)))
400 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
401 (g_atomic_pointer_compare_and_exchange ((atomic), (gpointer) (oldval), (gpointer) (newval)))
402 #define g_atomic_pointer_add(atomic, val) \
403 (g_atomic_pointer_add ((atomic), (gssize) (val)))
404 #define g_atomic_pointer_and(atomic, val) \
405 (g_atomic_pointer_and ((atomic), (gsize) (val)))
406 #define g_atomic_pointer_or(atomic, val) \
407 (g_atomic_pointer_or ((atomic), (gsize) (val)))
408 #define g_atomic_pointer_xor(atomic, val) \
409 (g_atomic_pointer_xor ((atomic), (gsize) (val)))
/* Comment fixed to match the guarding #if above (it previously referred to
 * the obsolete __GNUC__ / G_ATOMIC_OP_USE_GCC_BUILTINS condition). */
411 #endif /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */
413 #endif /* __G_ATOMIC_H__ */