/* Internal macros for atomic operations for GNU C Library.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _ATOMIC_H
#define _ATOMIC_H       1

/* This header defines three types of macros:

   - atomic arithmetic and logic operations on memory.  They all
     have the prefix "atomic_".

   - conditionally atomic operations of the same kinds.  These
     always behave identically but can be faster when atomicity
     is not really needed because only one thread has access to
     the memory location.  The tradeoff is that the code is slower
     in the multi-threaded case.  The interfaces have the prefix
     "catomic_".

   - support functions like barriers.  They also have the prefix
     "atomic_".

   Architectures must provide a few low-level macros (the compare
   and exchange definitions).  All others are optional.  They
   should only be provided if the architecture has specific
   support for the operation.

   As <atomic.h> macros are usually heavily nested and often use local
   variables to make sure side effects are evaluated properly, use a
   per-macro unique prefix for macro-local variables.  This file uses
   the __atgN_ prefix, where N is different in each macro.  */

#include <stdlib.h>

#include <bits/atomic.h>

/* Wrapper macros to call pre_NN_post (mem, ...) where NN is the
   bit width of *MEM.  The calling macro puts parens around MEM
   and following args.  */
#define __atomic_val_bysize(pre, post, mem, ...)                              \
  ({                                                                          \
    __typeof (*mem) __atg1_result;                                            \
    if (sizeof (*mem) == 1)                                                   \
      __atg1_result = pre##_8_##post (mem, __VA_ARGS__);                      \
    else if (sizeof (*mem) == 2)                                              \
      __atg1_result = pre##_16_##post (mem, __VA_ARGS__);                     \
    else if (sizeof (*mem) == 4)                                              \
      __atg1_result = pre##_32_##post (mem, __VA_ARGS__);                     \
    else if (sizeof (*mem) == 8)                                              \
      __atg1_result = pre##_64_##post (mem, __VA_ARGS__);                     \
    else                                                                      \
      abort ();                                                               \
    __atg1_result;                                                            \
  })
#define __atomic_bool_bysize(pre, post, mem, ...)                             \
  ({                                                                          \
    int __atg2_result;                                                        \
    if (sizeof (*mem) == 1)                                                   \
      __atg2_result = pre##_8_##post (mem, __VA_ARGS__);                      \
    else if (sizeof (*mem) == 2)                                              \
      __atg2_result = pre##_16_##post (mem, __VA_ARGS__);                     \
    else if (sizeof (*mem) == 4)                                              \
      __atg2_result = pre##_32_##post (mem, __VA_ARGS__);                     \
    else if (sizeof (*mem) == 8)                                              \
      __atg2_result = pre##_64_##post (mem, __VA_ARGS__);                     \
    else                                                                      \
      abort ();                                                               \
    __atg2_result;                                                            \
  })


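/* Illustrative note (not part of the original header): with a 32-bit
   *MEM, a call such as

     __atomic_val_bysize (__arch_compare_and_exchange_val, acq,
                          mem, newval, oldval)

   pastes the pre and post tokens around the selected bit width and
   expands to

     __arch_compare_and_exchange_val_32_acq (mem, newval, oldval)

   The sizeof chain is resolved at compile time, so the dead branches,
   including the abort () fallback for unsupported sizes, are optimized
   away.  */
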
/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
   Return the old *MEM value.  */
#if !defined atomic_compare_and_exchange_val_acq \
    && defined __arch_compare_and_exchange_val_32_acq
# define atomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  __atomic_val_bysize (__arch_compare_and_exchange_val,acq,                   \
                       mem, newval, oldval)
#endif


#ifndef catomic_compare_and_exchange_val_acq
# ifdef __arch_c_compare_and_exchange_val_32_acq
#  define catomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  __atomic_val_bysize (__arch_c_compare_and_exchange_val,acq,                 \
                       mem, newval, oldval)
# else
#  define catomic_compare_and_exchange_val_acq(mem, newval, oldval) \
  atomic_compare_and_exchange_val_acq (mem, newval, oldval)
# endif
#endif


#ifndef catomic_compare_and_exchange_val_rel
# ifndef atomic_compare_and_exchange_val_rel
#  define catomic_compare_and_exchange_val_rel(mem, newval, oldval)           \
  catomic_compare_and_exchange_val_acq (mem, newval, oldval)
# else
#  define catomic_compare_and_exchange_val_rel(mem, newval, oldval)           \
  atomic_compare_and_exchange_val_rel (mem, newval, oldval)
# endif
#endif


#ifndef atomic_compare_and_exchange_val_rel
# define atomic_compare_and_exchange_val_rel(mem, newval, oldval)             \
  atomic_compare_and_exchange_val_acq (mem, newval, oldval)
#endif


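/* Usage sketch (illustrative, not from the original header): the _val
   variant returns the previous value of *MEM, so a failed attempt also
   tells the caller what the memory held, and a retry loop can feed that
   observation straight into the next attempt:

     static int val;

     void
     add_one (void)
     {
       int old = val;
       int seen;
       while ((seen = atomic_compare_and_exchange_val_acq (&val, old + 1,
                                                           old)) != old)
         old = seen;
     }
*/
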
/* Atomically store NEWVAL in *MEM if *MEM is equal to OLDVAL.
   Return zero if *MEM was changed or non-zero if no exchange happened.  */
#ifndef atomic_compare_and_exchange_bool_acq
# ifdef __arch_compare_and_exchange_bool_32_acq
#  define atomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  __atomic_bool_bysize (__arch_compare_and_exchange_bool,acq,                 \
                        mem, newval, oldval)
# else
#  define atomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  ({ /* Cannot use __oldval here, because macros later in this file might     \
        call this macro with __oldval argument.  */                           \
     __typeof (oldval) __atg3_old = (oldval);                                 \
     atomic_compare_and_exchange_val_acq (mem, newval, __atg3_old)            \
       != __atg3_old;                                                         \
  })
# endif
#endif


#ifndef catomic_compare_and_exchange_bool_acq
# ifdef __arch_c_compare_and_exchange_bool_32_acq
#  define catomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  __atomic_bool_bysize (__arch_c_compare_and_exchange_bool,acq,               \
                        mem, newval, oldval)
# else
#  define catomic_compare_and_exchange_bool_acq(mem, newval, oldval) \
  ({ /* Cannot use __oldval here, because macros later in this file might     \
        call this macro with __oldval argument.  */                           \
     __typeof (oldval) __atg4_old = (oldval);                                 \
     catomic_compare_and_exchange_val_acq (mem, newval, __atg4_old)           \
       != __atg4_old;                                                         \
  })
# endif
#endif


#ifndef catomic_compare_and_exchange_bool_rel
# ifndef atomic_compare_and_exchange_bool_rel
#  define catomic_compare_and_exchange_bool_rel(mem, newval, oldval)          \
  catomic_compare_and_exchange_bool_acq (mem, newval, oldval)
# else
#  define catomic_compare_and_exchange_bool_rel(mem, newval, oldval)          \
  atomic_compare_and_exchange_bool_rel (mem, newval, oldval)
# endif
#endif


#ifndef atomic_compare_and_exchange_bool_rel
# define atomic_compare_and_exchange_bool_rel(mem, newval, oldval) \
  atomic_compare_and_exchange_bool_acq (mem, newval, oldval)
#endif


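/* Usage sketch (illustrative, not from the original header): the _bool
   variant is convenient when only success or failure matters.  A
   Treiber-style push onto a lock-free stack uses the release CAS so the
   node's contents are published before it becomes reachable:

     struct node { struct node *next; };
     static struct node *top;

     void
     push (struct node *n)
     {
       struct node *old;
       do
         {
           old = top;
           n->next = old;
         }
       while (atomic_compare_and_exchange_bool_rel (&top, n, old));
     }
*/
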
/* Store NEWVALUE in *MEM and return the old value.  */
#ifndef atomic_exchange_acq
# define atomic_exchange_acq(mem, newvalue) \
  ({ __typeof (*(mem)) __atg5_oldval;                                         \
     __typeof (mem) __atg5_memp = (mem);                                      \
     __typeof (*(mem)) __atg5_value = (newvalue);                             \
                                                                              \
     do                                                                       \
       __atg5_oldval = *__atg5_memp;                                          \
     while (__builtin_expect                                                  \
            (atomic_compare_and_exchange_bool_acq (__atg5_memp, __atg5_value, \
                                                   __atg5_oldval), 0));       \
                                                                              \
     __atg5_oldval; })
#endif

#ifndef atomic_exchange_rel
# define atomic_exchange_rel(mem, newvalue) atomic_exchange_acq (mem, newvalue)
#endif

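/* Usage sketch (illustrative, not from the original header): a simple
   test-and-set spinlock.  The lock is acquired when the previous value
   was 0; the release exchange makes the critical section visible before
   the lock appears free:

     static int lock;

     void
     spin_lock (void)
     {
       while (atomic_exchange_acq (&lock, 1) != 0)
         atomic_delay ();
     }

     void
     spin_unlock (void)
     {
       atomic_exchange_rel (&lock, 0);
     }
*/
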

/* Add VALUE to *MEM and return the old value of *MEM.  */
#ifndef atomic_exchange_and_add_acq
# ifdef atomic_exchange_and_add
#  define atomic_exchange_and_add_acq(mem, value) \
  atomic_exchange_and_add (mem, value)
# else
#  define atomic_exchange_and_add_acq(mem, value) \
  ({ __typeof (*(mem)) __atg6_oldval;                                         \
     __typeof (mem) __atg6_memp = (mem);                                      \
     __typeof (*(mem)) __atg6_value = (value);                                \
                                                                              \
     do                                                                       \
       __atg6_oldval = *__atg6_memp;                                          \
     while (__builtin_expect                                                  \
            (atomic_compare_and_exchange_bool_acq (__atg6_memp,               \
                                                   __atg6_oldval              \
                                                   + __atg6_value,            \
                                                   __atg6_oldval), 0));       \
                                                                              \
     __atg6_oldval; })
# endif
#endif

#ifndef atomic_exchange_and_add_rel
# define atomic_exchange_and_add_rel(mem, value) \
  atomic_exchange_and_add_acq(mem, value)
#endif

#ifndef atomic_exchange_and_add
# define atomic_exchange_and_add(mem, value) \
  atomic_exchange_and_add_acq(mem, value)
#endif

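/* Usage sketch (illustrative, not from the original header): because
   the macro returns the pre-increment value, concurrent callers each
   receive a distinct ticket number, starting from 0:

     static unsigned int next_ticket;

     unsigned int
     take_ticket (void)
     {
       return atomic_exchange_and_add (&next_ticket, 1);
     }
*/
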
#ifndef catomic_exchange_and_add
# define catomic_exchange_and_add(mem, value) \
  ({ __typeof (*(mem)) __atg7_oldv;                                           \
     __typeof (mem) __atg7_memp = (mem);                                      \
     __typeof (*(mem)) __atg7_value = (value);                                \
                                                                              \
     do                                                                       \
       __atg7_oldv = *__atg7_memp;                                            \
     while (__builtin_expect                                                  \
            (catomic_compare_and_exchange_bool_acq (__atg7_memp,              \
                                                    __atg7_oldv               \
                                                    + __atg7_value,           \
                                                    __atg7_oldv), 0));        \
                                                                              \
     __atg7_oldv; })
#endif


#ifndef atomic_max
# define atomic_max(mem, value) \
  do {                                                                        \
    __typeof (*(mem)) __atg8_oldval;                                          \
    __typeof (mem) __atg8_memp = (mem);                                       \
    __typeof (*(mem)) __atg8_value = (value);                                 \
    do {                                                                      \
      __atg8_oldval = *__atg8_memp;                                           \
      if (__atg8_oldval >= __atg8_value)                                      \
        break;                                                                \
    } while (__builtin_expect                                                 \
             (atomic_compare_and_exchange_bool_acq (__atg8_memp, __atg8_value,\
                                                    __atg8_oldval), 0));      \
  } while (0)
#endif


#ifndef catomic_max
# define catomic_max(mem, value) \
  do {                                                                        \
    __typeof (*(mem)) __atg9_oldv;                                            \
    __typeof (mem) __atg9_memp = (mem);                                       \
    __typeof (*(mem)) __atg9_value = (value);                                 \
    do {                                                                      \
      __atg9_oldv = *__atg9_memp;                                             \
      if (__atg9_oldv >= __atg9_value)                                        \
        break;                                                                \
    } while (__builtin_expect                                                 \
             (catomic_compare_and_exchange_bool_acq (__atg9_memp,             \
                                                     __atg9_value,            \
                                                     __atg9_oldv), 0));       \
  } while (0)
#endif


#ifndef atomic_min
# define atomic_min(mem, value) \
  do {                                                                        \
    __typeof (*(mem)) __atg10_oldval;                                         \
    __typeof (mem) __atg10_memp = (mem);                                      \
    __typeof (*(mem)) __atg10_value = (value);                                \
    do {                                                                      \
      __atg10_oldval = *__atg10_memp;                                         \
      if (__atg10_oldval <= __atg10_value)                                    \
        break;                                                                \
    } while (__builtin_expect                                                 \
             (atomic_compare_and_exchange_bool_acq (__atg10_memp,             \
                                                    __atg10_value,            \
                                                    __atg10_oldval), 0));     \
  } while (0)
#endif


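/* Usage sketch (illustrative, not from the original header): atomic_max
   maintains a high-water mark; the early break skips the CAS entirely
   when the stored value is already at least as large:

     static size_t peak_usage;

     void
     note_usage (size_t current)
     {
       atomic_max (&peak_usage, current);
     }
*/
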
#ifndef atomic_add
# define atomic_add(mem, value) (void) atomic_exchange_and_add ((mem), (value))
#endif


#ifndef catomic_add
# define catomic_add(mem, value) \
  (void) catomic_exchange_and_add ((mem), (value))
#endif


#ifndef atomic_increment
# define atomic_increment(mem) atomic_add ((mem), 1)
#endif


#ifndef catomic_increment
# define catomic_increment(mem) catomic_add ((mem), 1)
#endif


#ifndef atomic_increment_val
# define atomic_increment_val(mem) (atomic_exchange_and_add ((mem), 1) + 1)
#endif


#ifndef catomic_increment_val
# define catomic_increment_val(mem) (catomic_exchange_and_add ((mem), 1) + 1)
#endif


/* Add one to *MEM and return true iff it's now zero.  */
#ifndef atomic_increment_and_test
# define atomic_increment_and_test(mem) \
  (atomic_exchange_and_add ((mem), 1) + 1 == 0)
#endif


#ifndef atomic_decrement
# define atomic_decrement(mem) atomic_add ((mem), -1)
#endif


#ifndef catomic_decrement
# define catomic_decrement(mem) catomic_add ((mem), -1)
#endif


#ifndef atomic_decrement_val
# define atomic_decrement_val(mem) (atomic_exchange_and_add ((mem), -1) - 1)
#endif


#ifndef catomic_decrement_val
# define catomic_decrement_val(mem) (catomic_exchange_and_add ((mem), -1) - 1)
#endif


/* Subtract 1 from *MEM and return true iff it's now zero.  */
#ifndef atomic_decrement_and_test
# define atomic_decrement_and_test(mem) \
  (atomic_exchange_and_add ((mem), -1) == 1)
#endif


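/* Usage sketch (illustrative, not from the original header): the
   classic reference-counting pattern.  Exactly one thread observes the
   transition to zero and may free the object:

     struct obj { int refcnt; };

     void
     obj_release (struct obj *o)
     {
       if (atomic_decrement_and_test (&o->refcnt))
         free (o);
     }
*/
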
/* Decrement *MEM if it is > 0, and return the old value.  */
#ifndef atomic_decrement_if_positive
# define atomic_decrement_if_positive(mem) \
  ({ __typeof (*(mem)) __atg11_oldval;                                        \
     __typeof (mem) __atg11_memp = (mem);                                     \
                                                                              \
     do                                                                       \
       {                                                                      \
         __atg11_oldval = *__atg11_memp;                                      \
         if (__glibc_unlikely (__atg11_oldval <= 0))                          \
           break;                                                             \
       }                                                                      \
     while (__builtin_expect                                                  \
            (atomic_compare_and_exchange_bool_acq (__atg11_memp,              \
                                                   __atg11_oldval - 1,        \
                                                   __atg11_oldval), 0));      \
     __atg11_oldval; })
#endif


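/* Usage sketch (illustrative, not from the original header): the core
   of a semaphore-style trywait.  The returned old value reveals whether
   a token was actually consumed:

     static int tokens;

     int
     try_take_token (void)
     {
       return atomic_decrement_if_positive (&tokens) > 0;
     }
*/
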
#ifndef atomic_add_negative
# define atomic_add_negative(mem, value)                                      \
  ({ __typeof (value) __atg12_value = (value);                                \
     atomic_exchange_and_add (mem, __atg12_value) < -__atg12_value; })
#endif


#ifndef atomic_add_zero
# define atomic_add_zero(mem, value)                                          \
  ({ __typeof (value) __atg13_value = (value);                                \
     atomic_exchange_and_add (mem, __atg13_value) == -__atg13_value; })
#endif


#ifndef atomic_bit_set
# define atomic_bit_set(mem, bit) \
  (void) atomic_bit_test_set(mem, bit)
#endif


#ifndef atomic_bit_test_set
# define atomic_bit_test_set(mem, bit) \
  ({ __typeof (*(mem)) __atg14_old;                                           \
     __typeof (mem) __atg14_memp = (mem);                                     \
     __typeof (*(mem)) __atg14_mask = ((__typeof (*(mem))) 1 << (bit));       \
                                                                              \
     do                                                                       \
       __atg14_old = (*__atg14_memp);                                         \
     while (__builtin_expect                                                  \
            (atomic_compare_and_exchange_bool_acq (__atg14_memp,              \
                                                   __atg14_old | __atg14_mask,\
                                                   __atg14_old), 0));         \
                                                                              \
     __atg14_old & __atg14_mask; })
#endif

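/* Usage sketch (illustrative, not from the original header): a one-bit
   lock packed into a flag word.  atomic_bit_test_set returns nonzero
   iff the bit was already set, i.e. iff another thread holds the lock:

     enum { LOCK_BIT = 0 };
     static int flags;

     int
     try_lock (void)
     {
       return atomic_bit_test_set (&flags, LOCK_BIT) == 0;
     }
*/
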
/* Atomically *mem &= mask.  */
#ifndef atomic_and
# define atomic_and(mem, mask) \
  do {                                                                        \
    __typeof (*(mem)) __atg15_old;                                            \
    __typeof (mem) __atg15_memp = (mem);                                      \
    __typeof (*(mem)) __atg15_mask = (mask);                                  \
                                                                              \
    do                                                                        \
      __atg15_old = (*__atg15_memp);                                          \
    while (__builtin_expect                                                   \
           (atomic_compare_and_exchange_bool_acq (__atg15_memp,               \
                                                  __atg15_old & __atg15_mask, \
                                                  __atg15_old), 0));          \
  } while (0)
#endif

#ifndef catomic_and
# define catomic_and(mem, mask) \
  do {                                                                        \
    __typeof (*(mem)) __atg20_old;                                            \
    __typeof (mem) __atg20_memp = (mem);                                      \
    __typeof (*(mem)) __atg20_mask = (mask);                                  \
                                                                              \
    do                                                                        \
      __atg20_old = (*__atg20_memp);                                          \
    while (__builtin_expect                                                   \
           (catomic_compare_and_exchange_bool_acq (__atg20_memp,              \
                                                   __atg20_old & __atg20_mask,\
                                                   __atg20_old), 0));         \
  } while (0)
#endif

/* Atomically *mem &= mask and return the old value of *mem.  */
#ifndef atomic_and_val
# define atomic_and_val(mem, mask) \
  ({ __typeof (*(mem)) __atg16_old;                                           \
     __typeof (mem) __atg16_memp = (mem);                                     \
     __typeof (*(mem)) __atg16_mask = (mask);                                 \
                                                                              \
     do                                                                       \
       __atg16_old = (*__atg16_memp);                                         \
     while (__builtin_expect                                                  \
            (atomic_compare_and_exchange_bool_acq (__atg16_memp,              \
                                                   __atg16_old & __atg16_mask,\
                                                   __atg16_old), 0));         \
                                                                              \
     __atg16_old; })
#endif

/* Atomically *mem |= mask.  */
#ifndef atomic_or
# define atomic_or(mem, mask) \
  do {                                                                        \
    __typeof (*(mem)) __atg17_old;                                            \
    __typeof (mem) __atg17_memp = (mem);                                      \
    __typeof (*(mem)) __atg17_mask = (mask);                                  \
                                                                              \
    do                                                                        \
      __atg17_old = (*__atg17_memp);                                          \
    while (__builtin_expect                                                   \
           (atomic_compare_and_exchange_bool_acq (__atg17_memp,               \
                                                  __atg17_old | __atg17_mask, \
                                                  __atg17_old), 0));          \
  } while (0)
#endif

#ifndef catomic_or
# define catomic_or(mem, mask) \
  do {                                                                        \
    __typeof (*(mem)) __atg18_old;                                            \
    __typeof (mem) __atg18_memp = (mem);                                      \
    __typeof (*(mem)) __atg18_mask = (mask);                                  \
                                                                              \
    do                                                                        \
      __atg18_old = (*__atg18_memp);                                          \
    while (__builtin_expect                                                   \
           (catomic_compare_and_exchange_bool_acq (__atg18_memp,              \
                                                   __atg18_old | __atg18_mask,\
                                                   __atg18_old), 0));         \
  } while (0)
#endif

/* Atomically *mem |= mask and return the old value of *mem.  */
#ifndef atomic_or_val
# define atomic_or_val(mem, mask) \
  ({ __typeof (*(mem)) __atg19_old;                                           \
     __typeof (mem) __atg19_memp = (mem);                                     \
     __typeof (*(mem)) __atg19_mask = (mask);                                 \
                                                                              \
     do                                                                       \
       __atg19_old = (*__atg19_memp);                                         \
     while (__builtin_expect                                                  \
            (atomic_compare_and_exchange_bool_acq (__atg19_memp,              \
                                                   __atg19_old | __atg19_mask,\
                                                   __atg19_old), 0));         \
                                                                              \
     __atg19_old; })
#endif

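/* Usage sketch (illustrative, not from the original header): setting
   and testing flag bits in a shared word.  atomic_or_val reports the
   previous flags, so a caller can detect the 0 -> 1 transition and
   perform one-time work:

     enum { FLAG_DIRTY = 1 };
     static int state;

     int
     mark_dirty (void)
     {
       int old = atomic_or_val (&state, FLAG_DIRTY);
       return (old & FLAG_DIRTY) == 0;
     }
*/
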
#ifndef atomic_full_barrier
# define atomic_full_barrier() __asm ("" ::: "memory")
#endif


#ifndef atomic_read_barrier
# define atomic_read_barrier() atomic_full_barrier ()
#endif


#ifndef atomic_write_barrier
# define atomic_write_barrier() atomic_full_barrier ()
#endif


#ifndef atomic_forced_read
# define atomic_forced_read(x) \
  ({ __typeof (x) __x; __asm ("" : "=r" (__x) : "0" (x)); __x; })
#endif

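/* Usage sketch (illustrative, not from the original header):
   atomic_forced_read forces one real read of its argument at this
   point, preventing the compiler from reusing an earlier load or
   merging this load with a later one:

     static int shared_val;

     int
     snapshot (void)
     {
       return atomic_forced_read (shared_val);
     }
*/
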
/* This is equal to 1 iff the architecture supports 64b atomic operations.  */
#ifndef __HAVE_64B_ATOMICS
# error Unable to determine if 64-bit atomics are present.
#endif

/* The following functions are a subset of the atomic operations provided by
   C11.  Usually, a function named atomic_OP_MO(args) is equivalent to C11's
   atomic_OP_explicit(args, memory_order_MO); exceptions noted below.  */

/* Each arch can request to use compiler built-ins for C11 atomics.  If it
   does, all atomics will be based on these.  */
#if USE_ATOMIC_COMPILER_BUILTINS

/* We require 32b atomic operations; some archs also support 64b atomic
   operations.  */
void __atomic_link_error (void);
# if __HAVE_64B_ATOMICS == 1
#  define __atomic_check_size(mem) \
   if ((sizeof (*mem) != 4) && (sizeof (*mem) != 8))                          \
     __atomic_link_error ();
# else
#  define __atomic_check_size(mem) \
   if (sizeof (*mem) != 4)                                                    \
     __atomic_link_error ();
# endif

# define atomic_thread_fence_acquire() \
  __atomic_thread_fence (__ATOMIC_ACQUIRE)
# define atomic_thread_fence_release() \
  __atomic_thread_fence (__ATOMIC_RELEASE)
# define atomic_thread_fence_seq_cst() \
  __atomic_thread_fence (__ATOMIC_SEQ_CST)

# define atomic_load_relaxed(mem) \
  ({ __atomic_check_size((mem)); __atomic_load_n ((mem), __ATOMIC_RELAXED); })
# define atomic_load_acquire(mem) \
  ({ __atomic_check_size((mem)); __atomic_load_n ((mem), __ATOMIC_ACQUIRE); })

# define atomic_store_relaxed(mem, val) \
  do {                                                                        \
    __atomic_check_size((mem));                                               \
    __atomic_store_n ((mem), (val), __ATOMIC_RELAXED);                        \
  } while (0)
# define atomic_store_release(mem, val) \
  do {                                                                        \
    __atomic_check_size((mem));                                               \
    __atomic_store_n ((mem), (val), __ATOMIC_RELEASE);                        \
  } while (0)

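/* Usage sketch (illustrative, not from the original header): the
   canonical release/acquire message-passing idiom.  The acquire load of
   READY synchronizes with the release store, so the consumer is
   guaranteed to observe DATA == 42:

     static int data;
     static int ready;

     void
     producer (void)
     {
       atomic_store_relaxed (&data, 42);
       atomic_store_release (&ready, 1);
     }

     int
     consumer (void)
     {
       while (atomic_load_acquire (&ready) == 0)
         atomic_delay ();
       return atomic_load_relaxed (&data);
     }
*/
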
/* On failure, this CAS has memory_order_relaxed semantics.  */
# define atomic_compare_exchange_weak_relaxed(mem, expected, desired) \
  ({ __atomic_check_size((mem));                                              \
  __atomic_compare_exchange_n ((mem), (expected), (desired), 1,               \
    __ATOMIC_RELAXED, __ATOMIC_RELAXED); })
# define atomic_compare_exchange_weak_acquire(mem, expected, desired) \
  ({ __atomic_check_size((mem));                                              \
  __atomic_compare_exchange_n ((mem), (expected), (desired), 1,               \
    __ATOMIC_ACQUIRE, __ATOMIC_RELAXED); })
# define atomic_compare_exchange_weak_release(mem, expected, desired) \
  ({ __atomic_check_size((mem));                                              \
  __atomic_compare_exchange_n ((mem), (expected), (desired), 1,               \
    __ATOMIC_RELEASE, __ATOMIC_RELAXED); })

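/* Usage sketch (illustrative, not from the original header): the weak
   CAS may fail spuriously and is meant to be retried in a loop.  On
   failure it stores the value observed in *MEM into *EXPECTED, which
   feeds directly into the next attempt:

     static int val;

     void
     add_one (void)
     {
       int expected = atomic_load_relaxed (&val);
       while (!atomic_compare_exchange_weak_acquire (&val, &expected,
                                                     expected + 1))
         ;
     }
*/
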
# define atomic_exchange_acquire(mem, desired) \
  ({ __atomic_check_size((mem));                                              \
  __atomic_exchange_n ((mem), (desired), __ATOMIC_ACQUIRE); })
# define atomic_exchange_release(mem, desired) \
  ({ __atomic_check_size((mem));                                              \
  __atomic_exchange_n ((mem), (desired), __ATOMIC_RELEASE); })

# define atomic_fetch_add_relaxed(mem, operand) \
  ({ __atomic_check_size((mem));                                              \
  __atomic_fetch_add ((mem), (operand), __ATOMIC_RELAXED); })
# define atomic_fetch_add_acquire(mem, operand) \
  ({ __atomic_check_size((mem));                                              \
  __atomic_fetch_add ((mem), (operand), __ATOMIC_ACQUIRE); })
# define atomic_fetch_add_release(mem, operand) \
  ({ __atomic_check_size((mem));                                              \
  __atomic_fetch_add ((mem), (operand), __ATOMIC_RELEASE); })
# define atomic_fetch_add_acq_rel(mem, operand) \
  ({ __atomic_check_size((mem));                                              \
  __atomic_fetch_add ((mem), (operand), __ATOMIC_ACQ_REL); })

# define atomic_fetch_and_acquire(mem, operand) \
  ({ __atomic_check_size((mem));                                              \
  __atomic_fetch_and ((mem), (operand), __ATOMIC_ACQUIRE); })

# define atomic_fetch_or_relaxed(mem, operand) \
  ({ __atomic_check_size((mem));                                              \
  __atomic_fetch_or ((mem), (operand), __ATOMIC_RELAXED); })
# define atomic_fetch_or_acquire(mem, operand) \
  ({ __atomic_check_size((mem));                                              \
  __atomic_fetch_or ((mem), (operand), __ATOMIC_ACQUIRE); })

#else /* !USE_ATOMIC_COMPILER_BUILTINS  */

/* By default, we assume that read, write, and full barriers are equivalent
   to acquire, release, and seq_cst barriers.  Archs for which this does not
   hold have to provide custom definitions of the fences.  */
# ifndef atomic_thread_fence_acquire
#  define atomic_thread_fence_acquire() atomic_read_barrier ()
# endif
# ifndef atomic_thread_fence_release
#  define atomic_thread_fence_release() atomic_write_barrier ()
# endif
# ifndef atomic_thread_fence_seq_cst
#  define atomic_thread_fence_seq_cst() atomic_full_barrier ()
# endif

# ifndef atomic_load_relaxed
#  define atomic_load_relaxed(mem) \
   ({ __typeof (*(mem)) __atg100_val;                                         \
   __asm ("" : "=r" (__atg100_val) : "0" (*(mem)));                           \
   __atg100_val; })
# endif
# ifndef atomic_load_acquire
#  define atomic_load_acquire(mem) \
   ({ __typeof (*(mem)) __atg101_val = atomic_load_relaxed (mem);             \
   atomic_thread_fence_acquire ();                                            \
   __atg101_val; })
# endif

# ifndef atomic_store_relaxed
/* XXX Use inline asm here?  */
#  define atomic_store_relaxed(mem, val) do { *(mem) = (val); } while (0)
# endif
# ifndef atomic_store_release
#  define atomic_store_release(mem, val) \
   do {                                                                       \
     atomic_thread_fence_release ();                                          \
     atomic_store_relaxed ((mem), (val));                                     \
   } while (0)
# endif

/* On failure, this CAS has memory_order_relaxed semantics.  */
/* XXX This potentially has one branch more than necessary, but archs
   currently do not define a CAS that returns both the previous value and
   the success flag.  */
# ifndef atomic_compare_exchange_weak_acquire
#  define atomic_compare_exchange_weak_acquire(mem, expected, desired) \
   ({ __typeof (*(expected)) __atg102_expected = *(expected);                 \
   *(expected) =                                                              \
     atomic_compare_and_exchange_val_acq ((mem), (desired), *(expected));     \
   *(expected) == __atg102_expected; })
# endif
# ifndef atomic_compare_exchange_weak_relaxed
/* XXX Fall back to CAS with acquire MO because archs do not define a weaker
   CAS.  */
#  define atomic_compare_exchange_weak_relaxed(mem, expected, desired) \
   atomic_compare_exchange_weak_acquire ((mem), (expected), (desired))
# endif
# ifndef atomic_compare_exchange_weak_release
#  define atomic_compare_exchange_weak_release(mem, expected, desired) \
   ({ __typeof (*(expected)) __atg103_expected = *(expected);                 \
   *(expected) =                                                              \
     atomic_compare_and_exchange_val_rel ((mem), (desired), *(expected));     \
   *(expected) == __atg103_expected; })
# endif

# ifndef atomic_exchange_acquire
#  define atomic_exchange_acquire(mem, val) \
   atomic_exchange_acq ((mem), (val))
# endif
# ifndef atomic_exchange_release
#  define atomic_exchange_release(mem, val) \
   atomic_exchange_rel ((mem), (val))
# endif

# ifndef atomic_fetch_add_acquire
#  define atomic_fetch_add_acquire(mem, operand) \
   atomic_exchange_and_add_acq ((mem), (operand))
# endif
# ifndef atomic_fetch_add_relaxed
/* XXX Fall back to acquire MO because the MO semantics of
   atomic_exchange_and_add are not documented; the generic version falls back
   to atomic_exchange_and_add_acq if atomic_exchange_and_add is not defined,
   and vice versa.  */
#  define atomic_fetch_add_relaxed(mem, operand) \
   atomic_fetch_add_acquire ((mem), (operand))
# endif
# ifndef atomic_fetch_add_release
#  define atomic_fetch_add_release(mem, operand) \
   atomic_exchange_and_add_rel ((mem), (operand))
# endif
# ifndef atomic_fetch_add_acq_rel
#  define atomic_fetch_add_acq_rel(mem, operand) \
   ({ atomic_thread_fence_release ();                                         \
   atomic_exchange_and_add_acq ((mem), (operand)); })
# endif

/* XXX The default for atomic_and_val has acquire semantics, but this is not
   documented.  */
# ifndef atomic_fetch_and_acquire
#  define atomic_fetch_and_acquire(mem, operand) \
   atomic_and_val ((mem), (operand))
# endif

/* XXX The default for atomic_or_val has acquire semantics, but this is not
   documented.  */
# ifndef atomic_fetch_or_acquire
#  define atomic_fetch_or_acquire(mem, operand) \
   atomic_or_val ((mem), (operand))
# endif
/* XXX Fall back to acquire MO because archs do not define a weaker
   atomic_or_val.  */
# ifndef atomic_fetch_or_relaxed
#  define atomic_fetch_or_relaxed(mem, operand) \
   atomic_fetch_or_acquire ((mem), (operand))
# endif

#endif /* !USE_ATOMIC_COMPILER_BUILTINS  */


#ifndef atomic_delay
# define atomic_delay() do { /* nothing */ } while (0)
#endif

#endif  /* atomic.h */