 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1996-1999 by Silicon Graphics.  All rights reserved.
 * Copyright (c) 1999-2003 by Hewlett-Packard Company.  All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */
18 #include "../test_and_set_t_is_ao_t.h"
20 #include "../standard_ao_double_t.h"
22 #ifndef AO_UNIPROCESSOR
26 __asm__ __volatile__("dmb st" : : : "memory");
28 # define AO_HAVE_nop_write
/* TODO: Adjust version check on fixing double-wide AO support in GCC. */
35 AO_double_load(const volatile AO_double_t *addr)
40 /* Note that STXP cannot be discarded because LD[A]XP is not */
41 /* single-copy atomic (unlike LDREXD for 32-bit ARM). */
43 __asm__ __volatile__("//AO_double_load\n"
45 " ldxp %w0, %w1, %3\n"
46 " stxp %w2, %w0, %w1, %3"
49 " stxp %w2, %0, %1, %3"
51 : "=&r" (result.AO_val1), "=&r" (result.AO_val2), "=&r" (status)
53 } while (AO_EXPECT_FALSE(status));
56 # define AO_HAVE_double_load
59 AO_double_load_acquire(const volatile AO_double_t *addr)
65 __asm__ __volatile__("//AO_double_load_acquire\n"
67 " ldaxp %w0, %w1, %3\n"
68 " stxp %w2, %w0, %w1, %3"
71 " stxp %w2, %0, %1, %3"
73 : "=&r" (result.AO_val1), "=&r" (result.AO_val2), "=&r" (status)
75 } while (AO_EXPECT_FALSE(status));
78 # define AO_HAVE_double_load_acquire
81 AO_double_store(volatile AO_double_t *addr, AO_double_t value)
87 __asm__ __volatile__("//AO_double_store\n"
89 " ldxp %w0, %w1, %3\n"
90 " stxp %w2, %w4, %w5, %3"
93 " stxp %w2, %4, %5, %3"
95 : "=&r" (old_val.AO_val1), "=&r" (old_val.AO_val2), "=&r" (status),
97 : "r" (value.AO_val1), "r" (value.AO_val2));
98 /* Compared to the arm.h implementation, the 'cc' (flags) are not */
99 /* clobbered because A64 has no concept of conditional execution. */
100 } while (AO_EXPECT_FALSE(status));
102 # define AO_HAVE_double_store
105 AO_double_store_release(volatile AO_double_t *addr, AO_double_t value)
111 __asm__ __volatile__("//AO_double_store_release\n"
113 " ldxp %w0, %w1, %3\n"
114 " stlxp %w2, %w4, %w5, %3"
117 " stlxp %w2, %4, %5, %3"
119 : "=&r" (old_val.AO_val1), "=&r" (old_val.AO_val2), "=&r" (status),
121 : "r" (value.AO_val1), "r" (value.AO_val2));
122 } while (AO_EXPECT_FALSE(status));
124 # define AO_HAVE_double_store_release
127 AO_double_compare_and_swap(volatile AO_double_t *addr,
128 AO_double_t old_val, AO_double_t new_val)
134 __asm__ __volatile__("//AO_double_compare_and_swap\n"
136 " ldxp %w0, %w1, %2\n"
140 : "=&r" (tmp.AO_val1), "=&r" (tmp.AO_val2)
142 if (tmp.AO_val1 != old_val.AO_val1 || tmp.AO_val2 != old_val.AO_val2)
144 __asm__ __volatile__(
146 " stxp %w0, %w2, %w3, %1\n"
148 " stxp %w0, %2, %3, %1\n"
150 : "=&r" (result), "=Q" (*addr)
151 : "r" (new_val.AO_val1), "r" (new_val.AO_val2));
152 } while (AO_EXPECT_FALSE(result));
155 # define AO_HAVE_double_compare_and_swap
158 AO_double_compare_and_swap_acquire(volatile AO_double_t *addr,
159 AO_double_t old_val, AO_double_t new_val)
165 __asm__ __volatile__("//AO_double_compare_and_swap_acquire\n"
167 " ldaxp %w0, %w1, %2\n"
169 " ldaxp %0, %1, %2\n"
171 : "=&r" (tmp.AO_val1), "=&r" (tmp.AO_val2)
173 if (tmp.AO_val1 != old_val.AO_val1 || tmp.AO_val2 != old_val.AO_val2)
175 __asm__ __volatile__(
177 " stxp %w0, %w2, %w3, %1\n"
179 " stxp %w0, %2, %3, %1\n"
181 : "=&r" (result), "=Q" (*addr)
182 : "r" (new_val.AO_val1), "r" (new_val.AO_val2));
183 } while (AO_EXPECT_FALSE(result));
186 # define AO_HAVE_double_compare_and_swap_acquire
189 AO_double_compare_and_swap_release(volatile AO_double_t *addr,
190 AO_double_t old_val, AO_double_t new_val)
196 __asm__ __volatile__("//AO_double_compare_and_swap_release\n"
198 " ldxp %w0, %w1, %2\n"
202 : "=&r" (tmp.AO_val1), "=&r" (tmp.AO_val2)
204 if (tmp.AO_val1 != old_val.AO_val1 || tmp.AO_val2 != old_val.AO_val2)
206 __asm__ __volatile__(
208 " stlxp %w0, %w2, %w3, %1\n"
210 " stlxp %w0, %2, %3, %1\n"
212 : "=&r" (result), "=Q" (*addr)
213 : "r" (new_val.AO_val1), "r" (new_val.AO_val2));
214 } while (AO_EXPECT_FALSE(result));
217 # define AO_HAVE_double_compare_and_swap_release
220 AO_double_compare_and_swap_full(volatile AO_double_t *addr,
221 AO_double_t old_val, AO_double_t new_val)
227 __asm__ __volatile__("//AO_double_compare_and_swap_full\n"
229 " ldaxp %w0, %w1, %2\n"
231 " ldaxp %0, %1, %2\n"
233 : "=&r" (tmp.AO_val1), "=&r" (tmp.AO_val2)
235 if (tmp.AO_val1 != old_val.AO_val1 || tmp.AO_val2 != old_val.AO_val2)
237 __asm__ __volatile__(
239 " stlxp %w0, %w2, %w3, %1\n"
241 " stlxp %w0, %2, %3, %1\n"
243 : "=&r" (result), "=Q" (*addr)
244 : "r" (new_val.AO_val1), "r" (new_val.AO_val2));
245 } while (AO_EXPECT_FALSE(result));
248 # define AO_HAVE_double_compare_and_swap_full
249 #endif /* __GNUC__ >= 4 */