/*
 * Copyright © 2011 Ryan Lortie
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 *
 * Author: Ryan Lortie <desrt@desrt.ca>
 */

#ifndef __G_ATOMIC_H__
#define __G_ATOMIC_H__

#if !defined (__GLIB_H_INSIDE__) && !defined (GLIB_COMPILATION)
#error "Only <glib.h> can be included directly."
#endif

#include <glib/gtypes.h>

G_BEGIN_DECLS

GLIB_AVAILABLE_IN_ALL
gint                    g_atomic_int_get                      (const volatile gint *atomic);
GLIB_AVAILABLE_IN_ALL
void                    g_atomic_int_set                      (volatile gint  *atomic,
                                                               gint            newval);
GLIB_AVAILABLE_IN_ALL
void                    g_atomic_int_inc                      (volatile gint  *atomic);
GLIB_AVAILABLE_IN_ALL
gboolean                g_atomic_int_dec_and_test             (volatile gint  *atomic);
GLIB_AVAILABLE_IN_ALL
gboolean                g_atomic_int_compare_and_exchange     (volatile gint  *atomic,
                                                               gint            oldval,
                                                               gint            newval);
GLIB_AVAILABLE_IN_ALL
gint                    g_atomic_int_add                      (volatile gint  *atomic,
                                                               gint            val);
GLIB_AVAILABLE_IN_2_30
guint                   g_atomic_int_and                      (volatile guint *atomic,
                                                               guint           val);
GLIB_AVAILABLE_IN_2_30
guint                   g_atomic_int_or                       (volatile guint *atomic,
                                                               guint           val);
GLIB_AVAILABLE_IN_2_30
guint                   g_atomic_int_xor                      (volatile guint *atomic,
                                                               guint           val);

GLIB_AVAILABLE_IN_ALL
gpointer                g_atomic_pointer_get                  (const volatile void *atomic);
GLIB_AVAILABLE_IN_ALL
void                    g_atomic_pointer_set                  (volatile void  *atomic,
                                                               gpointer        newval);
GLIB_AVAILABLE_IN_ALL
gboolean                g_atomic_pointer_compare_and_exchange (volatile void  *atomic,
                                                               gpointer        oldval,
                                                               gpointer        newval);
GLIB_AVAILABLE_IN_ALL
gssize                  g_atomic_pointer_add                  (volatile void  *atomic,
                                                               gssize          val);
GLIB_AVAILABLE_IN_2_30
gsize                   g_atomic_pointer_and                  (volatile void  *atomic,
                                                               gsize           val);
GLIB_AVAILABLE_IN_2_30
gsize                   g_atomic_pointer_or                   (volatile void  *atomic,
                                                               gsize           val);
GLIB_AVAILABLE_IN_2_30
gsize                   g_atomic_pointer_xor                  (volatile void  *atomic,
                                                               gsize           val);

GLIB_DEPRECATED_IN_2_30_FOR(g_atomic_int_add)
gint                    g_atomic_int_exchange_and_add         (volatile gint  *atomic,
                                                               gint            val);

G_END_DECLS

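/* Illustrative usage sketch (not part of the installed API; the type and
 * function names below are hypothetical and exist only to show the intended
 * call pattern): a minimal reference-counted object built on the
 * declarations above.
 *
 *   #include <glib.h>
 *
 *   typedef struct
 *   {
 *     gint ref_count;   // initialised to 1 when the object is created
 *     gchar *data;
 *   } ExampleObject;
 *
 *   static ExampleObject *
 *   example_object_ref (ExampleObject *obj)
 *   {
 *     g_atomic_int_inc (&obj->ref_count);
 *     return obj;
 *   }
 *
 *   static void
 *   example_object_unref (ExampleObject *obj)
 *   {
 *     // g_atomic_int_dec_and_test() returns TRUE only for the thread that
 *     // drops the last reference, so the free runs exactly once.
 *     if (g_atomic_int_dec_and_test (&obj->ref_count))
 *       {
 *         g_free (obj->data);
 *         g_free (obj);
 *       }
 *   }
 */
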
#if defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)

/* We prefer the new C11-style atomic extension of GCC if available */
#if defined(__ATOMIC_SEQ_CST)

#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    gint gaig_temp;                                                          \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    __atomic_load ((gint *)(atomic), &gaig_temp, __ATOMIC_SEQ_CST);          \
    (gint) gaig_temp;                                                        \
  }))
#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    gint gais_temp = (gint) (newval);                                        \
    (void) (0 ? *(atomic) ^ (newval) : 1);                                   \
    __atomic_store ((gint *)(atomic), &gais_temp, __ATOMIC_SEQ_CST);         \
  }))

#if defined(glib_typeof)
#define g_atomic_pointer_get(atomic)                                       \
  (G_GNUC_EXTENSION ({                                                     \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));               \
    glib_typeof (*(atomic)) gapg_temp_newval;                              \
    glib_typeof ((atomic)) gapg_temp_atomic = (atomic);                    \
    __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST); \
    gapg_temp_newval;                                                      \
  }))
#define g_atomic_pointer_set(atomic, newval)                                \
  (G_GNUC_EXTENSION ({                                                      \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                \
    glib_typeof ((atomic)) gaps_temp_atomic = (atomic);                     \
    glib_typeof (*(atomic)) gaps_temp_newval = (newval);                    \
    (void) (0 ? (gpointer) * (atomic) : NULL);                              \
    __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST); \
  }))
#else /* if !defined(glib_typeof) */
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    gpointer gapg_temp_newval;                                               \
    gpointer *gapg_temp_atomic = (gpointer *)(atomic);                       \
    __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST);   \
    gapg_temp_newval;                                                        \
  }))
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    gpointer *gaps_temp_atomic = (gpointer *)(atomic);                       \
    gpointer gaps_temp_newval = (gpointer)(newval);                          \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST);  \
  }))
#endif /* !defined(glib_typeof) */

#define g_atomic_int_inc(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    (void) __atomic_fetch_add ((atomic), 1, __ATOMIC_SEQ_CST);               \
  }))
#define g_atomic_int_dec_and_test(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    __atomic_fetch_sub ((atomic), 1, __ATOMIC_SEQ_CST) == 1;                 \
  }))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    gint gaicae_oldval = (oldval);                                           \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1);                        \
    __atomic_compare_exchange_n ((atomic), &gaicae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#define g_atomic_int_add(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (gint) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST);           \
  }))
#define g_atomic_int_and(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __atomic_fetch_and ((atomic), (val), __ATOMIC_SEQ_CST);          \
  }))
#define g_atomic_int_or(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __atomic_fetch_or ((atomic), (val), __ATOMIC_SEQ_CST);           \
  }))
#define g_atomic_int_xor(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __atomic_fetch_xor ((atomic), (val), __ATOMIC_SEQ_CST);          \
  }))

#if defined(glib_typeof)
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof (oldval) == sizeof (gpointer));                  \
    glib_typeof ((oldval)) gapcae_oldval = (oldval);                         \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __atomic_compare_exchange_n ((atomic), &gapcae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#else /* if !defined(glib_typeof) */
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof (oldval) == sizeof (gpointer));                  \
    gpointer gapcae_oldval = (gpointer)(oldval);                             \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __atomic_compare_exchange_n ((atomic), &gapcae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
  }))
#endif /* defined(glib_typeof) */
#define g_atomic_pointer_add(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gssize) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST);         \
  }))
#define g_atomic_pointer_and(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    volatile gsize *gapa_atomic = (volatile gsize *) (atomic);               \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize));                    \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gsize) __atomic_fetch_and (gapa_atomic, (val), __ATOMIC_SEQ_CST);       \
  }))
#define g_atomic_pointer_or(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    volatile gsize *gapo_atomic = (volatile gsize *) (atomic);               \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize));                    \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gsize) __atomic_fetch_or (gapo_atomic, (val), __ATOMIC_SEQ_CST);        \
  }))
#define g_atomic_pointer_xor(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    volatile gsize *gapx_atomic = (volatile gsize *) (atomic);               \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize));                    \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gsize) __atomic_fetch_xor (gapx_atomic, (val), __ATOMIC_SEQ_CST);       \
  }))

#else /* defined(__ATOMIC_SEQ_CST) */

/* We want to achieve __ATOMIC_SEQ_CST semantics here. See
 * https://en.cppreference.com/w/c/atomic/memory_order#Constants. For load
 * operations, that means performing an *acquire*:
 * > A load operation with this memory order performs the acquire operation on
 * > the affected memory location: no reads or writes in the current thread can
 * > be reordered before this load. All writes in other threads that release
 * > the same atomic variable are visible in the current thread.
 *
 * “no reads or writes in the current thread can be reordered before this load”
 * is implemented using a compiler barrier (a no-op `__asm__` section) to
 * prevent instruction reordering. Writes in other threads are synchronised
 * using `__sync_synchronize()`. It’s unclear from the GCC documentation whether
 * `__sync_synchronize()` acts as a compiler barrier, hence our explicit use of
 * one.
 *
 * For store operations, `__ATOMIC_SEQ_CST` means performing a *release*:
 * > A store operation with this memory order performs the release operation:
 * > no reads or writes in the current thread can be reordered after this store.
 * > All writes in the current thread are visible in other threads that acquire
 * > the same atomic variable (see Release-Acquire ordering below) and writes
 * > that carry a dependency into the atomic variable become visible in other
 * > threads that consume the same atomic (see Release-Consume ordering below).
 *
 * “no reads or writes in the current thread can be reordered after this store”
 * is implemented using a compiler barrier to prevent instruction reordering.
 * “All writes in the current thread are visible in other threads” is implemented
 * using `__sync_synchronize()`; similarly for “writes that carry a dependency”.
 */
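/* As a concrete sketch of the pattern just described (the function names
 * here are illustrative, not part of GLib), a sequentially consistent load
 * and store can be open-coded as:
 *
 *   static gint
 *   example_load (const volatile gint *location)
 *   {
 *     gint value = (gint) *location;              // plain load
 *     __sync_synchronize ();                      // full memory barrier
 *     __asm__ __volatile__ ("" : : : "memory");   // compiler barrier
 *     return value;
 *   }
 *
 *   static void
 *   example_store (volatile gint *location, gint value)
 *   {
 *     __sync_synchronize ();                      // publish earlier writes
 *     __asm__ __volatile__ ("" : : : "memory");   // keep compile-time order
 *     *location = value;                          // plain store
 *   }
 *
 * The g_atomic_int_get() and g_atomic_int_set() macros below expand to
 * essentially this shape.
 */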
#define g_atomic_int_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    gint gaig_result;                                                        \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    gaig_result = (gint) *(atomic);                                          \
    __sync_synchronize ();                                                   \
    __asm__ __volatile__ ("" : : : "memory");                                \
    gaig_result;                                                             \
  }))
#define g_atomic_int_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) : 1);                                   \
    __sync_synchronize ();                                                   \
    __asm__ __volatile__ ("" : : : "memory");                                \
    *(atomic) = (newval);                                                    \
  }))
#define g_atomic_pointer_get(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    gpointer gapg_result;                                                    \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    gapg_result = (gpointer) *(atomic);                                      \
    __sync_synchronize ();                                                   \
    __asm__ __volatile__ ("" : : : "memory");                                \
    gapg_result;                                                             \
  }))
#if defined(glib_typeof)
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __sync_synchronize ();                                                   \
    __asm__ __volatile__ ("" : : : "memory");                                \
    *(atomic) = (glib_typeof (*(atomic))) (gsize) (newval);                  \
  }))
#else /* if !defined(glib_typeof) */
#define g_atomic_pointer_set(atomic, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __sync_synchronize ();                                                   \
    __asm__ __volatile__ ("" : : : "memory");                                \
    *(atomic) = (gpointer) (gsize) (newval);                                 \
  }))
#endif /* defined(glib_typeof) */

#define g_atomic_int_inc(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    (void) __sync_fetch_and_add ((atomic), 1);                               \
  }))
#define g_atomic_int_dec_and_test(atomic) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
    __sync_fetch_and_sub ((atomic), 1) == 1;                                 \
  }))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1);                        \
    __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
  }))
#define g_atomic_int_add(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (gint) __sync_fetch_and_add ((atomic), (val));                           \
  }))
#define g_atomic_int_and(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __sync_fetch_and_and ((atomic), (val));                          \
  }))
#define g_atomic_int_or(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __sync_fetch_and_or ((atomic), (val));                           \
  }))
#define g_atomic_int_xor(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
    (void) (0 ? *(atomic) ^ (val) : 1);                                      \
    (guint) __sync_fetch_and_xor ((atomic), (val));                          \
  }))

#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
  }))
#define g_atomic_pointer_add(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gssize) __sync_fetch_and_add ((atomic), (val));                         \
  }))
#define g_atomic_pointer_and(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gsize) __sync_fetch_and_and ((atomic), (val));                          \
  }))
#define g_atomic_pointer_or(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gsize) __sync_fetch_and_or ((atomic), (val));                           \
  }))
#define g_atomic_pointer_xor(atomic, val) \
  (G_GNUC_EXTENSION ({                                                       \
    G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
    (void) (0 ? (gpointer) *(atomic) : NULL);                                \
    (void) (0 ? (val) ^ (val) : 1);                                          \
    (gsize) __sync_fetch_and_xor ((atomic), (val));                          \
  }))

#endif /* !defined(__ATOMIC_SEQ_CST) */

#else /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */

#define g_atomic_int_get(atomic) \
  (g_atomic_int_get ((gint *) (atomic)))
#define g_atomic_int_set(atomic, newval) \
  (g_atomic_int_set ((gint *) (atomic), (gint) (newval)))
#define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_int_compare_and_exchange ((gint *) (atomic), (oldval), (newval)))
#define g_atomic_int_add(atomic, val) \
  (g_atomic_int_add ((gint *) (atomic), (val)))
#define g_atomic_int_and(atomic, val) \
  (g_atomic_int_and ((guint *) (atomic), (val)))
#define g_atomic_int_or(atomic, val) \
  (g_atomic_int_or ((guint *) (atomic), (val)))
#define g_atomic_int_xor(atomic, val) \
  (g_atomic_int_xor ((guint *) (atomic), (val)))
#define g_atomic_int_inc(atomic) \
  (g_atomic_int_inc ((gint *) (atomic)))
#define g_atomic_int_dec_and_test(atomic) \
  (g_atomic_int_dec_and_test ((gint *) (atomic)))

#define g_atomic_pointer_get(atomic) \
  (g_atomic_pointer_get (atomic))
#define g_atomic_pointer_set(atomic, newval) \
  (g_atomic_pointer_set ((atomic), (gpointer) (newval)))
#define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
  (g_atomic_pointer_compare_and_exchange ((atomic), (gpointer) (oldval), (gpointer) (newval)))
#define g_atomic_pointer_add(atomic, val) \
  (g_atomic_pointer_add ((atomic), (gssize) (val)))
#define g_atomic_pointer_and(atomic, val) \
  (g_atomic_pointer_and ((atomic), (gsize) (val)))
#define g_atomic_pointer_or(atomic, val) \
  (g_atomic_pointer_or ((atomic), (gsize) (val)))
#define g_atomic_pointer_xor(atomic, val) \
  (g_atomic_pointer_xor ((atomic), (gsize) (val)))

#endif /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */

#endif /* __G_ATOMIC_H__ */