/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 2003-2011 Hewlett-Packard Development Company, L.P.
 * Copyright (c) 2013-2017 Ivan Maidanski
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */
/* The following implementation assumes GCC 4.7 or later.               */
/* For the details, see GNU Manual, chapter 6.52 (Built-in functions    */
/* for memory model aware atomic operations).                           */
22 #define AO_GCC_ATOMIC_TEST_AND_SET
23 #include "../test_and_set_t_is_char.h"
25 #if defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_1) \
26 || defined(AO_GCC_FORCE_HAVE_CAS)
27 # define AO_GCC_HAVE_char_SYNC_CAS
30 #if (__SIZEOF_SHORT__ == 2 && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_2)) \
31 || defined(AO_GCC_FORCE_HAVE_CAS)
32 # define AO_GCC_HAVE_short_SYNC_CAS
35 #if (__SIZEOF_INT__ == 4 && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)) \
36 || (__SIZEOF_INT__ == 8 && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8)) \
37 || defined(AO_GCC_FORCE_HAVE_CAS)
38 # define AO_GCC_HAVE_int_SYNC_CAS
41 #if (__SIZEOF_SIZE_T__ == 4 && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4)) \
42 || (__SIZEOF_SIZE_T__ == 8 \
43 && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8)) \
44 || defined(AO_GCC_FORCE_HAVE_CAS)
45 # define AO_GCC_HAVE_SYNC_CAS
/* Replace the default compiler barrier with the atomic signal fence:   */
/* it forbids compiler reordering across the point without emitting     */
/* any hardware fence instruction.                                      */
#undef AO_compiler_barrier
#define AO_compiler_barrier() __atomic_signal_fence(__ATOMIC_SEQ_CST)
51 #ifdef AO_UNIPROCESSOR
52 /* If only a single processor (core) is used, AO_UNIPROCESSOR could */
53 /* be defined by the client to avoid unnecessary memory barrier. */
57 AO_compiler_barrier();
59 # define AO_HAVE_nop_full
65 __atomic_thread_fence(__ATOMIC_ACQUIRE);
67 # define AO_HAVE_nop_read
69 # ifndef AO_HAVE_nop_write
73 __atomic_thread_fence(__ATOMIC_RELEASE);
75 # define AO_HAVE_nop_write
81 /* __sync_synchronize() could be used instead. */
82 __atomic_thread_fence(__ATOMIC_SEQ_CST);
84 # define AO_HAVE_nop_full
85 #endif /* !AO_UNIPROCESSOR */
87 #include "generic-small.h"
89 #ifndef AO_PREFER_GENERALIZED
90 # include "generic-arithm.h"
92 # define AO_CLEAR(addr) __atomic_clear(addr, __ATOMIC_RELEASE)
93 # define AO_HAVE_CLEAR
96 AO_test_and_set(volatile AO_TS_t *addr)
98 return (AO_TS_VAL_t)__atomic_test_and_set(addr, __ATOMIC_RELAXED);
100 # define AO_HAVE_test_and_set
102 AO_INLINE AO_TS_VAL_t
103 AO_test_and_set_acquire(volatile AO_TS_t *addr)
105 return (AO_TS_VAL_t)__atomic_test_and_set(addr, __ATOMIC_ACQUIRE);
107 # define AO_HAVE_test_and_set_acquire
109 AO_INLINE AO_TS_VAL_t
110 AO_test_and_set_release(volatile AO_TS_t *addr)
112 return (AO_TS_VAL_t)__atomic_test_and_set(addr, __ATOMIC_RELEASE);
114 # define AO_HAVE_test_and_set_release
116 AO_INLINE AO_TS_VAL_t
117 AO_test_and_set_full(volatile AO_TS_t *addr)
119 return (AO_TS_VAL_t)__atomic_test_and_set(addr, __ATOMIC_SEQ_CST);
121 # define AO_HAVE_test_and_set_full
122 #endif /* !AO_PREFER_GENERALIZED */
#ifdef AO_HAVE_DOUBLE_PTR_STORAGE

  /* Double-word CAS is available if the hardware supports CAS on an    */
  /* operand twice the size of size_t (i.e. the whole AO_double_t).     */
# if ((__SIZEOF_SIZE_T__ == 4 \
       && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8)) \
      || (__SIZEOF_SIZE_T__ == 8 /* half of AO_double_t */ \
          && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16))) \
     && !defined(AO_SKIPATOMIC_double_compare_and_swap_ANY)
#   define AO_GCC_HAVE_double_SYNC_CAS
# endif

# if !defined(AO_GCC_HAVE_double_SYNC_CAS) || !defined(AO_PREFER_GENERALIZED)

  /* Atomic double-word load/store, expressed through the AO_whole      */
  /* (single integer) view of AO_double_t.                              */
#  if !defined(AO_HAVE_double_load) && !defined(AO_SKIPATOMIC_double_load)
    AO_INLINE AO_double_t
    AO_double_load(const volatile AO_double_t *addr)
    {
      AO_double_t result;

      result.AO_whole = __atomic_load_n(&addr->AO_whole, __ATOMIC_RELAXED);
      return result;
    }
#   define AO_HAVE_double_load
#  endif

#  if !defined(AO_HAVE_double_load_acquire) \
      && !defined(AO_SKIPATOMIC_double_load_acquire)
    AO_INLINE AO_double_t
    AO_double_load_acquire(const volatile AO_double_t *addr)
    {
      AO_double_t result;

      result.AO_whole = __atomic_load_n(&addr->AO_whole, __ATOMIC_ACQUIRE);
      return result;
    }
#   define AO_HAVE_double_load_acquire
#  endif

#  if !defined(AO_HAVE_double_store) && !defined(AO_SKIPATOMIC_double_store)
    AO_INLINE void
    AO_double_store(volatile AO_double_t *addr, AO_double_t value)
    {
      __atomic_store_n(&addr->AO_whole, value.AO_whole, __ATOMIC_RELAXED);
    }
#   define AO_HAVE_double_store
#  endif

#  if !defined(AO_HAVE_double_store_release) \
      && !defined(AO_SKIPATOMIC_double_store_release)
    AO_INLINE void
    AO_double_store_release(volatile AO_double_t *addr, AO_double_t value)
    {
      __atomic_store_n(&addr->AO_whole, value.AO_whole, __ATOMIC_RELEASE);
    }
#   define AO_HAVE_double_store_release
#  endif

# endif /* !AO_GCC_HAVE_double_SYNC_CAS || !AO_PREFER_GENERALIZED */

#endif /* AO_HAVE_DOUBLE_PTR_STORAGE */
#ifdef AO_GCC_HAVE_double_SYNC_CAS

  /* Double-word compare-and-swap: returns nonzero iff *addr matched    */
  /* old_val and was replaced by new_val.  Strong (non-weak) form is    */
  /* used, so there are no spurious failures.  The four variants        */
  /* differ only in the memory orders passed to the builtin.            */
# ifndef AO_HAVE_double_compare_and_swap
    AO_INLINE int
    AO_double_compare_and_swap(volatile AO_double_t *addr,
                               AO_double_t old_val, AO_double_t new_val)
    {
      return (int)__atomic_compare_exchange_n(&addr->AO_whole,
                        &old_val.AO_whole /* p_expected */,
                        new_val.AO_whole /* desired */,
                        0 /* is_weak: false */,
                        __ATOMIC_RELAXED /* success */,
                        __ATOMIC_RELAXED /* failure */);
    }
#   define AO_HAVE_double_compare_and_swap
# endif

# ifndef AO_HAVE_double_compare_and_swap_acquire
    AO_INLINE int
    AO_double_compare_and_swap_acquire(volatile AO_double_t *addr,
                                       AO_double_t old_val,
                                       AO_double_t new_val)
    {
      return (int)__atomic_compare_exchange_n(&addr->AO_whole,
                        &old_val.AO_whole, new_val.AO_whole, 0,
                        __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
    }
#   define AO_HAVE_double_compare_and_swap_acquire
# endif

# ifndef AO_HAVE_double_compare_and_swap_release
    AO_INLINE int
    AO_double_compare_and_swap_release(volatile AO_double_t *addr,
                                       AO_double_t old_val,
                                       AO_double_t new_val)
    {
      return (int)__atomic_compare_exchange_n(&addr->AO_whole,
                        &old_val.AO_whole, new_val.AO_whole, 0,
                        __ATOMIC_RELEASE /* success */,
                        __ATOMIC_RELAXED /* failure */);
    }
#   define AO_HAVE_double_compare_and_swap_release
# endif

# ifndef AO_HAVE_double_compare_and_swap_full
    AO_INLINE int
    AO_double_compare_and_swap_full(volatile AO_double_t *addr,
                                    AO_double_t old_val, AO_double_t new_val)
    {
      return (int)__atomic_compare_exchange_n(&addr->AO_whole,
                        &old_val.AO_whole, new_val.AO_whole, 0,
                        __ATOMIC_ACQ_REL /* success */,
                        __ATOMIC_ACQUIRE /* failure */);
    }
#   define AO_HAVE_double_compare_and_swap_full
# endif

#endif /* AO_GCC_HAVE_double_SYNC_CAS */