 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */

#if AO_CLANG_PREREQ(3, 9) && !defined(AO_DISABLE_GCC_ATOMICS)
  /* Probably, it could be enabled for earlier clang versions as well. */

  /* As of clang-3.9, __GCC_HAVE_SYNC_COMPARE_AND_SWAP_n are missing.  */
# define AO_GCC_FORCE_HAVE_CAS

# define AO_GCC_HAVE_double_SYNC_CAS
# include "../standard_ao_double_t.h"

# include "generic.h"

#else /* AO_DISABLE_GCC_ATOMICS */

#include "../all_aligned_atomic_load_store.h"

#include "../test_and_set_t_is_ao_t.h"

/* There's also "isync" and "barrier"; however, for all current CPU    */
/* versions, "syncht" should suffice.  Likewise, it seems that the     */
/* auto-defined versions of *_acquire, *_release or *_full suffice for */
/* all current ISA implementations.                                    */
AO_INLINE void
AO_nop_full(void)
{
  __asm__ __volatile__("syncht" : : : "memory");
}
#define AO_HAVE_nop_full
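
/* Usage sketch (not part of the original port; the helpers below are  */
/* hypothetical): AO_nop_full is a full memory barrier, so a writer    */
/* can order a plain data store before a flag store, and a reader can  */
/* order the flag load before the data load.                           */
/*
 *   int payload;
 *   AO_t data_ready = 0;
 *
 *   void publish(int v) {
 *     payload = v;
 *     AO_nop_full();                       // payload visible before flag
 *     AO_store(&data_ready, 1);
 *   }
 *
 *   int consume(void) {
 *     while (AO_load(&data_ready) == 0) {} // spin until published
 *     AO_nop_full();                       // flag read before payload read
 *     return payload;
 *   }
 */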

/* The Hexagon has load-locked, store-conditional primitives, and so   */
/* resulting code is very nearly identical to that of PowerPC.         */

#ifndef AO_PREFER_GENERALIZED
AO_INLINE AO_t
AO_fetch_and_add(volatile AO_t *addr, AO_t incr)
{
  AO_t oldval;
  AO_t newval;
  __asm__ __volatile__(
     "1:\n"
     "  %0 = memw_locked(%3);\n"        /* load and reserve            */
     "  %1 = add (%0,%4);\n"            /* increment                   */
     "  memw_locked(%3,p1) = %1;\n"     /* store conditional           */
     "  if (!p1) jump 1b;\n"            /* retry if lost reservation   */
     : "=&r"(oldval), "=&r"(newval), "+m"(*addr)
     : "r"(addr), "r"(incr)
     : "memory", "p1");
  return oldval;
}
#define AO_HAVE_fetch_and_add
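
/* Usage sketch (not from the original file; the counter and function  */
/* names are hypothetical): AO_fetch_and_add returns the value seen    */
/* before the addition, which makes it a natural unique-ID allocator.  */
/*
 *   AO_t event_count = 0;
 *
 *   AO_t next_event_id(void) {
 *     return AO_fetch_and_add(&event_count, 1);  // old value is the ID
 *   }
 */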

AO_INLINE AO_TS_VAL_t
AO_test_and_set(volatile AO_TS_t *addr)
{
  int oldval;
  int locked_value = 1;

  __asm__ __volatile__(
     "1:\n"
     "  %0 = memw_locked(%2);\n"        /* load and reserve            */
     "  {\n"
     "    p2 = cmp.eq(%0,#0);\n"        /* if load is not zero,        */
     "    if (!p2.new) jump:nt 2f;\n"   /* we are done                 */
     "  }\n"
     "  memw_locked(%2,p1) = %3;\n"     /* else store conditional      */
     "  if (!p1) jump 1b;\n"            /* retry if lost reservation   */
     "2:\n"                             /* oldval is zero if we set    */
     : "=&r"(oldval), "+m"(*addr)
     : "r"(addr), "r"(locked_value)
     : "memory", "p1", "p2");
  return (AO_TS_VAL_t)oldval;
}
#define AO_HAVE_test_and_set
#endif /* !AO_PREFER_GENERALIZED */
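
/* Usage sketch (not from the original file; the lock variable and the */
/* functions are hypothetical): a minimal spin lock built from the     */
/* auto-generalized AO_test_and_set_full together with AO_CLEAR.       */
/*
 *   AO_TS_t my_lock = AO_TS_INITIALIZER;
 *
 *   void my_lock_acquire(void) {
 *     while (AO_test_and_set_full(&my_lock) == AO_TS_SET) {
 *       // spin; previous holder has not called AO_CLEAR yet
 *     }
 *   }
 *
 *   void my_lock_release(void) {
 *     AO_CLEAR(&my_lock);              // store AO_TS_CLEAR with release
 *   }
 */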

#ifndef AO_GENERALIZE_ASM_BOOL_CAS
AO_INLINE int
AO_compare_and_swap(volatile AO_t *addr, AO_t old, AO_t new_val)
{
  AO_t __oldval;
  int result = 0;
  __asm__ __volatile__(
     "1:\n"
     "  %0 = memw_locked(%3);\n"        /* load and reserve            */
     "  {\n"
     "    p2 = cmp.eq(%0,%4);\n"        /* if load is not equal to     */
     "    if (!p2.new) jump:nt 2f;\n"   /* old, fail                   */
     "  }\n"
     "  memw_locked(%3,p1) = %5;\n"     /* else store conditional      */
     "  if (!p1) jump 1b;\n"            /* retry if lost reservation   */
     "  %1 = #1\n"                      /* success, result = 1         */
     "2:\n"
     : "=&r" (__oldval), "+r" (result), "+m"(*addr)
     : "r" (addr), "r" (old), "r" (new_val)
     : "p1", "p2", "memory"
     );
  return result;
}
# define AO_HAVE_compare_and_swap
#endif /* !AO_GENERALIZE_ASM_BOOL_CAS */
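
/* Usage sketch (not from the original file; add_to_total is           */
/* hypothetical): the boolean CAS reports only success or failure, so  */
/* callers re-read the location and retry until their update wins.     */
/*
 *   void add_to_total(volatile AO_t *total, AO_t amount) {
 *     AO_t seen;
 *     do {
 *       seen = AO_load(total);
 *     } while (!AO_compare_and_swap(total, seen, seen + amount));
 *   }
 */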

AO_INLINE AO_t
AO_fetch_compare_and_swap(volatile AO_t *addr, AO_t old_val, AO_t new_val)
{
  AO_t __oldval;

  __asm__ __volatile__(
     "1:\n"
     "  %0 = memw_locked(%2);\n"        /* load and reserve            */
     "  {\n"
     "    p2 = cmp.eq(%0,%3);\n"        /* if load is not equal to     */
     "    if (!p2.new) jump:nt 2f;\n"   /* old_val, fail               */
     "  }\n"
     "  memw_locked(%2,p1) = %4;\n"     /* else store conditional      */
     "  if (!p1) jump 1b;\n"            /* retry if lost reservation   */
     "2:\n"
     : "=&r" (__oldval), "+m"(*addr)
     : "r" (addr), "r" (old_val), "r" (new_val)
     : "p1", "p2", "memory"
     );
  return __oldval;
}
#define AO_HAVE_fetch_compare_and_swap
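
/* Usage sketch (not from the original file; update_max is             */
/* hypothetical): unlike the boolean CAS, the fetch variant returns    */
/* the value actually observed, so the retry loop needs no reload.     */
/*
 *   void update_max(volatile AO_t *max, AO_t candidate) {
 *     AO_t seen = AO_load(max);
 *     while (seen < candidate) {
 *       AO_t prev = AO_fetch_compare_and_swap(max, seen, candidate);
 *       if (prev == seen) break;       // CAS succeeded
 *       seen = prev;                   // lost a race; retry with new value
 *     }
 *   }
 */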

#endif /* AO_DISABLE_GCC_ATOMICS */