/* Imported Upstream version 2.64.5
 * [platform/upstream/glib.git] / glib / gatomic.h */
1 /*
2  * Copyright © 2011 Ryan Lortie
3  *
4  * This library is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * This library is distributed in the hope that it will be useful, but
10  * WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
16  *
17  * Author: Ryan Lortie <desrt@desrt.ca>
18  */
19
20 #ifndef __G_ATOMIC_H__
21 #define __G_ATOMIC_H__
22
23 #if !defined (__GLIB_H_INSIDE__) && !defined (GLIB_COMPILATION)
24 #error "Only <glib.h> can be included directly."
25 #endif
26
27 #include <glib/gtypes.h>
28
29 G_BEGIN_DECLS
30
31 GLIB_AVAILABLE_IN_ALL
32 gint                    g_atomic_int_get                      (const volatile gint *atomic);
33 GLIB_AVAILABLE_IN_ALL
34 void                    g_atomic_int_set                      (volatile gint  *atomic,
35                                                                gint            newval);
36 GLIB_AVAILABLE_IN_ALL
37 void                    g_atomic_int_inc                      (volatile gint  *atomic);
38 GLIB_AVAILABLE_IN_ALL
39 gboolean                g_atomic_int_dec_and_test             (volatile gint  *atomic);
40 GLIB_AVAILABLE_IN_ALL
41 gboolean                g_atomic_int_compare_and_exchange     (volatile gint  *atomic,
42                                                                gint            oldval,
43                                                                gint            newval);
44 GLIB_AVAILABLE_IN_ALL
45 gint                    g_atomic_int_add                      (volatile gint  *atomic,
46                                                                gint            val);
47 GLIB_AVAILABLE_IN_2_30
48 guint                   g_atomic_int_and                      (volatile guint *atomic,
49                                                                guint           val);
50 GLIB_AVAILABLE_IN_2_30
51 guint                   g_atomic_int_or                       (volatile guint *atomic,
52                                                                guint           val);
53 GLIB_AVAILABLE_IN_ALL
54 guint                   g_atomic_int_xor                      (volatile guint *atomic,
55                                                                guint           val);
56
57 GLIB_AVAILABLE_IN_ALL
58 gpointer                g_atomic_pointer_get                  (const volatile void *atomic);
59 GLIB_AVAILABLE_IN_ALL
60 void                    g_atomic_pointer_set                  (volatile void  *atomic,
61                                                                gpointer        newval);
62 GLIB_AVAILABLE_IN_ALL
63 gboolean                g_atomic_pointer_compare_and_exchange (volatile void  *atomic,
64                                                                gpointer        oldval,
65                                                                gpointer        newval);
66 GLIB_AVAILABLE_IN_ALL
67 gssize                  g_atomic_pointer_add                  (volatile void  *atomic,
68                                                                gssize          val);
69 GLIB_AVAILABLE_IN_2_30
70 gsize                   g_atomic_pointer_and                  (volatile void  *atomic,
71                                                                gsize           val);
72 GLIB_AVAILABLE_IN_2_30
73 gsize                   g_atomic_pointer_or                   (volatile void  *atomic,
74                                                                gsize           val);
75 GLIB_AVAILABLE_IN_ALL
76 gsize                   g_atomic_pointer_xor                  (volatile void  *atomic,
77                                                                gsize           val);
78
79 GLIB_DEPRECATED_IN_2_30_FOR(g_atomic_int_add)
80 gint                    g_atomic_int_exchange_and_add         (volatile gint  *atomic,
81                                                                gint            val);
82
83 G_END_DECLS
84
85 #if defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)
86
87 /* We prefer the new C11-style atomic extension of GCC if available */
88 #if defined(__ATOMIC_SEQ_CST)
89
90 #define g_atomic_int_get(atomic) \
91   (G_GNUC_EXTENSION ({                                                       \
92     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
93     gint gaig_temp;                                                          \
94     (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
95     __atomic_load ((gint *)(atomic), &gaig_temp, __ATOMIC_SEQ_CST);          \
96     (gint) gaig_temp;                                                        \
97   }))
98 #define g_atomic_int_set(atomic, newval) \
99   (G_GNUC_EXTENSION ({                                                       \
100     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
101     gint gais_temp = (gint) (newval);                                        \
102     (void) (0 ? *(atomic) ^ (newval) : 1);                                   \
103     __atomic_store ((gint *)(atomic), &gais_temp, __ATOMIC_SEQ_CST);         \
104   }))
105
106 #if defined(g_has_typeof)
107 #define g_atomic_pointer_get(atomic) \
108   (G_GNUC_EXTENSION ({                                                       \
109     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
110     __typeof__(*(atomic)) gapg_temp_newval;                                  \
111     __typeof__((atomic)) gapg_temp_atomic = (atomic);                        \
112     __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST);   \
113     gapg_temp_newval;                                                        \
114   }))
115 #define g_atomic_pointer_set(atomic, newval) \
116   (G_GNUC_EXTENSION ({                                                       \
117     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
118     __typeof__((atomic)) gaps_temp_atomic = (atomic);                        \
119     __typeof__(*(atomic)) gaps_temp_newval = (newval);                       \
120     (void) (0 ? (gpointer) *(atomic) : NULL);                                \
121     __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST);  \
122   }))
123 #else  /* if !defined(g_has_typeof) */
124 #define g_atomic_pointer_get(atomic) \
125   (G_GNUC_EXTENSION ({                                                       \
126     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
127     gpointer gapg_temp_newval;                                               \
128     gpointer *gapg_temp_atomic = (gpointer *)(atomic);                       \
129     __atomic_load (gapg_temp_atomic, &gapg_temp_newval, __ATOMIC_SEQ_CST);   \
130     gapg_temp_newval;                                                        \
131   }))
132 #define g_atomic_pointer_set(atomic, newval) \
133   (G_GNUC_EXTENSION ({                                                       \
134     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
135     gpointer *gaps_temp_atomic = (gpointer *)(atomic);                       \
136     gpointer gaps_temp_newval = (gpointer)(newval);                          \
137     (void) (0 ? (gpointer) *(atomic) : NULL);                                \
138     __atomic_store (gaps_temp_atomic, &gaps_temp_newval, __ATOMIC_SEQ_CST);  \
139   }))
140 #endif  /* !defined(g_has_typeof) */
141
142 #define g_atomic_int_inc(atomic) \
143   (G_GNUC_EXTENSION ({                                                       \
144     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
145     (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
146     (void) __atomic_fetch_add ((atomic), 1, __ATOMIC_SEQ_CST);               \
147   }))
148 #define g_atomic_int_dec_and_test(atomic) \
149   (G_GNUC_EXTENSION ({                                                       \
150     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
151     (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
152     __atomic_fetch_sub ((atomic), 1, __ATOMIC_SEQ_CST) == 1;                 \
153   }))
154 #define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
155   (G_GNUC_EXTENSION ({                                                       \
156     gint gaicae_oldval = (oldval);                                           \
157     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
158     (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1);                        \
159     __atomic_compare_exchange_n ((atomic), &gaicae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
160   }))
161 #define g_atomic_int_add(atomic, val) \
162   (G_GNUC_EXTENSION ({                                                       \
163     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
164     (void) (0 ? *(atomic) ^ (val) : 1);                                      \
165     (gint) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST);           \
166   }))
167 #define g_atomic_int_and(atomic, val) \
168   (G_GNUC_EXTENSION ({                                                       \
169     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
170     (void) (0 ? *(atomic) ^ (val) : 1);                                      \
171     (guint) __atomic_fetch_and ((atomic), (val), __ATOMIC_SEQ_CST);          \
172   }))
173 #define g_atomic_int_or(atomic, val) \
174   (G_GNUC_EXTENSION ({                                                       \
175     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
176     (void) (0 ? *(atomic) ^ (val) : 1);                                      \
177     (guint) __atomic_fetch_or ((atomic), (val), __ATOMIC_SEQ_CST);           \
178   }))
179 #define g_atomic_int_xor(atomic, val) \
180   (G_GNUC_EXTENSION ({                                                       \
181     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
182     (void) (0 ? *(atomic) ^ (val) : 1);                                      \
183     (guint) __atomic_fetch_xor ((atomic), (val), __ATOMIC_SEQ_CST);          \
184   }))
185
186 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
187   (G_GNUC_EXTENSION ({                                                       \
188     G_STATIC_ASSERT (sizeof (oldval) == sizeof (gpointer));                  \
189     __typeof__ ((oldval)) gapcae_oldval = (oldval);                          \
190     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
191     (void) (0 ? (gpointer) *(atomic) : NULL);                                \
192     __atomic_compare_exchange_n ((atomic), &gapcae_oldval, (newval), FALSE, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST) ? TRUE : FALSE; \
193   }))
194 #define g_atomic_pointer_add(atomic, val) \
195   (G_GNUC_EXTENSION ({                                                       \
196     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
197     (void) (0 ? (gpointer) *(atomic) : NULL);                                \
198     (void) (0 ? (val) ^ (val) : 1);                                          \
199     (gssize) __atomic_fetch_add ((atomic), (val), __ATOMIC_SEQ_CST);         \
200   }))
201 #define g_atomic_pointer_and(atomic, val) \
202   (G_GNUC_EXTENSION ({                                                       \
203     volatile gsize *gapa_atomic = (volatile gsize *) (atomic);               \
204     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
205     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize));                    \
206     (void) (0 ? (gpointer) *(atomic) : NULL);                                \
207     (void) (0 ? (val) ^ (val) : 1);                                          \
208     (gsize) __atomic_fetch_and (gapa_atomic, (val), __ATOMIC_SEQ_CST);       \
209   }))
210 #define g_atomic_pointer_or(atomic, val) \
211   (G_GNUC_EXTENSION ({                                                       \
212     volatile gsize *gapo_atomic = (volatile gsize *) (atomic);               \
213     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
214     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize));                    \
215     (void) (0 ? (gpointer) *(atomic) : NULL);                                \
216     (void) (0 ? (val) ^ (val) : 1);                                          \
217     (gsize) __atomic_fetch_or (gapo_atomic, (val), __ATOMIC_SEQ_CST);        \
218   }))
219 #define g_atomic_pointer_xor(atomic, val) \
220   (G_GNUC_EXTENSION ({                                                       \
221     volatile gsize *gapx_atomic = (volatile gsize *) (atomic);               \
222     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
223     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gsize));                    \
224     (void) (0 ? (gpointer) *(atomic) : NULL);                                \
225     (void) (0 ? (val) ^ (val) : 1);                                          \
226     (gsize) __atomic_fetch_xor (gapx_atomic, (val), __ATOMIC_SEQ_CST);       \
227   }))
228
229 #else /* defined(__ATOMIC_SEQ_CST) */
230
231 /* We want to achieve __ATOMIC_SEQ_CST semantics here. See
232  * https://en.cppreference.com/w/c/atomic/memory_order#Constants. For load
233  * operations, that means performing an *acquire*:
234  * > A load operation with this memory order performs the acquire operation on
235  * > the affected memory location: no reads or writes in the current thread can
236  * > be reordered before this load. All writes in other threads that release
237  * > the same atomic variable are visible in the current thread.
238  *
239  * “no reads or writes in the current thread can be reordered before this load”
240  * is implemented using a compiler barrier (a no-op `__asm__` section) to
241  * prevent instruction reordering. Writes in other threads are synchronised
242  * using `__sync_synchronize()`. It’s unclear from the GCC documentation whether
243  * `__sync_synchronize()` acts as a compiler barrier, hence our explicit use of
244  * one.
245  *
246  * For store operations, `__ATOMIC_SEQ_CST` means performing a *release*:
247  * > A store operation with this memory order performs the release operation:
248  * > no reads or writes in the current thread can be reordered after this store.
249  * > All writes in the current thread are visible in other threads that acquire
250  * > the same atomic variable (see Release-Acquire ordering below) and writes
251  * > that carry a dependency into the atomic variable become visible in other
252  * > threads that consume the same atomic (see Release-Consume ordering below).
253  *
254  * “no reads or writes in the current thread can be reordered after this store”
255  * is implemented using a compiler barrier to prevent instruction reordering.
256  * “All writes in the current thread are visible in other threads” is implemented
257  * using `__sync_synchronize()`; similarly for “writes that carry a dependency”.
258  */
259 #define g_atomic_int_get(atomic) \
260   (G_GNUC_EXTENSION ({                                                       \
261     gint gaig_result;                                                        \
262     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
263     (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
264     gaig_result = (gint) *(atomic);                                          \
265     __sync_synchronize ();                                                   \
266     __asm__ __volatile__ ("" : : : "memory");                                \
267     gaig_result;                                                             \
268   }))
269 #define g_atomic_int_set(atomic, newval) \
270   (G_GNUC_EXTENSION ({                                                       \
271     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
272     (void) (0 ? *(atomic) ^ (newval) : 1);                                   \
273     __sync_synchronize ();                                                   \
274     __asm__ __volatile__ ("" : : : "memory");                                \
275     *(atomic) = (newval);                                                    \
276   }))
277 #define g_atomic_pointer_get(atomic) \
278   (G_GNUC_EXTENSION ({                                                       \
279     gpointer gapg_result;                                                    \
280     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
281     gapg_result = (gpointer) *(atomic);                                      \
282     __sync_synchronize ();                                                   \
283     __asm__ __volatile__ ("" : : : "memory");                                \
284     gapg_result;                                                             \
285   }))
286 #define g_atomic_pointer_set(atomic, newval) \
287   (G_GNUC_EXTENSION ({                                                       \
288     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
289     (void) (0 ? (gpointer) *(atomic) : NULL);                                \
290     __sync_synchronize ();                                                   \
291     __asm__ __volatile__ ("" : : : "memory");                                \
292     *(atomic) = (__typeof__ (*(atomic))) (gsize) (newval);                   \
293   }))
294
295 #define g_atomic_int_inc(atomic) \
296   (G_GNUC_EXTENSION ({                                                       \
297     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
298     (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
299     (void) __sync_fetch_and_add ((atomic), 1);                               \
300   }))
301 #define g_atomic_int_dec_and_test(atomic) \
302   (G_GNUC_EXTENSION ({                                                       \
303     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
304     (void) (0 ? *(atomic) ^ *(atomic) : 1);                                  \
305     __sync_fetch_and_sub ((atomic), 1) == 1;                                 \
306   }))
307 #define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
308   (G_GNUC_EXTENSION ({                                                       \
309     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
310     (void) (0 ? *(atomic) ^ (newval) ^ (oldval) : 1);                        \
311     __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
312   }))
313 #define g_atomic_int_add(atomic, val) \
314   (G_GNUC_EXTENSION ({                                                       \
315     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
316     (void) (0 ? *(atomic) ^ (val) : 1);                                      \
317     (gint) __sync_fetch_and_add ((atomic), (val));                           \
318   }))
319 #define g_atomic_int_and(atomic, val) \
320   (G_GNUC_EXTENSION ({                                                       \
321     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
322     (void) (0 ? *(atomic) ^ (val) : 1);                                      \
323     (guint) __sync_fetch_and_and ((atomic), (val));                          \
324   }))
325 #define g_atomic_int_or(atomic, val) \
326   (G_GNUC_EXTENSION ({                                                       \
327     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
328     (void) (0 ? *(atomic) ^ (val) : 1);                                      \
329     (guint) __sync_fetch_and_or ((atomic), (val));                           \
330   }))
331 #define g_atomic_int_xor(atomic, val) \
332   (G_GNUC_EXTENSION ({                                                       \
333     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gint));                     \
334     (void) (0 ? *(atomic) ^ (val) : 1);                                      \
335     (guint) __sync_fetch_and_xor ((atomic), (val));                          \
336   }))
337
338 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
339   (G_GNUC_EXTENSION ({                                                       \
340     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
341     (void) (0 ? (gpointer) *(atomic) : NULL);                                \
342     __sync_bool_compare_and_swap ((atomic), (oldval), (newval)) ? TRUE : FALSE; \
343   }))
344 #define g_atomic_pointer_add(atomic, val) \
345   (G_GNUC_EXTENSION ({                                                       \
346     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
347     (void) (0 ? (gpointer) *(atomic) : NULL);                                \
348     (void) (0 ? (val) ^ (val) : 1);                                          \
349     (gssize) __sync_fetch_and_add ((atomic), (val));                         \
350   }))
351 #define g_atomic_pointer_and(atomic, val) \
352   (G_GNUC_EXTENSION ({                                                       \
353     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
354     (void) (0 ? (gpointer) *(atomic) : NULL);                                \
355     (void) (0 ? (val) ^ (val) : 1);                                          \
356     (gsize) __sync_fetch_and_and ((atomic), (val));                          \
357   }))
358 #define g_atomic_pointer_or(atomic, val) \
359   (G_GNUC_EXTENSION ({                                                       \
360     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
361     (void) (0 ? (gpointer) *(atomic) : NULL);                                \
362     (void) (0 ? (val) ^ (val) : 1);                                          \
363     (gsize) __sync_fetch_and_or ((atomic), (val));                           \
364   }))
365 #define g_atomic_pointer_xor(atomic, val) \
366   (G_GNUC_EXTENSION ({                                                       \
367     G_STATIC_ASSERT (sizeof *(atomic) == sizeof (gpointer));                 \
368     (void) (0 ? (gpointer) *(atomic) : NULL);                                \
369     (void) (0 ? (val) ^ (val) : 1);                                          \
370     (gsize) __sync_fetch_and_xor ((atomic), (val));                          \
371   }))
372
373 #endif /* !defined(__ATOMIC_SEQ_CST) */
374
375 #else /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */
376
377 #define g_atomic_int_get(atomic) \
378   (g_atomic_int_get ((gint *) (atomic)))
379 #define g_atomic_int_set(atomic, newval) \
380   (g_atomic_int_set ((gint *) (atomic), (gint) (newval)))
381 #define g_atomic_int_compare_and_exchange(atomic, oldval, newval) \
382   (g_atomic_int_compare_and_exchange ((gint *) (atomic), (oldval), (newval)))
383 #define g_atomic_int_add(atomic, val) \
384   (g_atomic_int_add ((gint *) (atomic), (val)))
385 #define g_atomic_int_and(atomic, val) \
386   (g_atomic_int_and ((guint *) (atomic), (val)))
387 #define g_atomic_int_or(atomic, val) \
388   (g_atomic_int_or ((guint *) (atomic), (val)))
389 #define g_atomic_int_xor(atomic, val) \
390   (g_atomic_int_xor ((guint *) (atomic), (val)))
391 #define g_atomic_int_inc(atomic) \
392   (g_atomic_int_inc ((gint *) (atomic)))
393 #define g_atomic_int_dec_and_test(atomic) \
394   (g_atomic_int_dec_and_test ((gint *) (atomic)))
395
396 #define g_atomic_pointer_get(atomic) \
397   (g_atomic_pointer_get (atomic))
398 #define g_atomic_pointer_set(atomic, newval) \
399   (g_atomic_pointer_set ((atomic), (gpointer) (newval)))
400 #define g_atomic_pointer_compare_and_exchange(atomic, oldval, newval) \
401   (g_atomic_pointer_compare_and_exchange ((atomic), (gpointer) (oldval), (gpointer) (newval)))
402 #define g_atomic_pointer_add(atomic, val) \
403   (g_atomic_pointer_add ((atomic), (gssize) (val)))
404 #define g_atomic_pointer_and(atomic, val) \
405   (g_atomic_pointer_and ((atomic), (gsize) (val)))
406 #define g_atomic_pointer_or(atomic, val) \
407   (g_atomic_pointer_or ((atomic), (gsize) (val)))
408 #define g_atomic_pointer_xor(atomic, val) \
409   (g_atomic_pointer_xor ((atomic), (gsize) (val)))
410
#endif /* defined(G_ATOMIC_LOCK_FREE) && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) */
412
413 #endif /* __G_ATOMIC_H__ */