/*
 * Copyright © 2007  Chris Wilson
 * Copyright © 2009,2010  Red Hat, Inc.
 * Copyright © 2011,2012  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Chris Wilson <chris@chris-wilson.co.uk>
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */
#ifndef HB_ATOMIC_HH
#define HB_ATOMIC_HH

#include "hb.hh"


/*
 * Atomic integers and pointers.
 */


/* We need external help for these */
#if defined(hb_atomic_int_impl_add) \
 && defined(hb_atomic_ptr_impl_get) \
 && defined(hb_atomic_ptr_impl_cmpexch)

/* Defined externally, i.e. in config.h. */
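/* For illustration only: a build that takes this branch has already injected
 * its own definitions (e.g. from a config.h) before this header is included.
 * A hypothetical set of such definitions on top of GCC's legacy __sync
 * builtins could look like the following; `_my_ptr_get` is a made-up helper
 * name, not part of HarfBuzz:
 *
 *   #define _hb_memory_barrier()               __sync_synchronize ()
 *   #define hb_atomic_int_impl_add(AI, V)      __sync_fetch_and_add ((AI), (V))
 *   #define hb_atomic_ptr_impl_cmpexch(P,O,N)  __sync_bool_compare_and_swap ((P), (O), (N))
 *
 *   static inline void *_my_ptr_get (void **P)
 *   { void *v = *P; __sync_synchronize (); return v; }
 *   #define hb_atomic_ptr_impl_get(P)          _my_ptr_get ((void **) (P))
 */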
#elif !defined(HB_NO_MT) && defined(__ATOMIC_ACQUIRE)

/* C++11-style GCC primitives. */

#define _hb_memory_barrier()            __sync_synchronize ()

#define hb_atomic_int_impl_add(AI, V)           __atomic_fetch_add ((AI), (V), __ATOMIC_ACQ_REL)
#define hb_atomic_int_impl_set_relaxed(AI, V)   __atomic_store_n ((AI), (V), __ATOMIC_RELAXED)
#define hb_atomic_int_impl_set(AI, V)           __atomic_store_n ((AI), (V), __ATOMIC_RELEASE)
#define hb_atomic_int_impl_get_relaxed(AI)      __atomic_load_n ((AI), __ATOMIC_RELAXED)
#define hb_atomic_int_impl_get(AI)              __atomic_load_n ((AI), __ATOMIC_ACQUIRE)

#define hb_atomic_ptr_impl_set_relaxed(P, V)    __atomic_store_n ((P), (V), __ATOMIC_RELAXED)
#define hb_atomic_ptr_impl_get_relaxed(P)       __atomic_load_n ((P), __ATOMIC_RELAXED)
#define hb_atomic_ptr_impl_get(P)               __atomic_load_n ((P), __ATOMIC_ACQUIRE)
static inline bool
_hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
{
  const void *O = O_; // Need lvalue
  return __atomic_compare_exchange_n ((void **) P, (void **) &O, (void *) N, true, __ATOMIC_ACQ_REL, __ATOMIC_RELAXED);
}
#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_atomic_ptr_impl_cmplexch ((const void **) (P), (O), (N))
#elif !defined(HB_NO_MT) && __cplusplus >= 201103L

/* C++11 atomics. */

#include <atomic>

#define _hb_memory_barrier()            std::atomic_thread_fence(std::memory_order_acq_rel)
#define _hb_memory_r_barrier()          std::atomic_thread_fence(std::memory_order_acquire)
#define _hb_memory_w_barrier()          std::atomic_thread_fence(std::memory_order_release)

#define hb_atomic_int_impl_add(AI, V)           (reinterpret_cast<std::atomic<int> *> (AI)->fetch_add ((V), std::memory_order_acq_rel))
#define hb_atomic_int_impl_set_relaxed(AI, V)   (reinterpret_cast<std::atomic<int> *> (AI)->store ((V), std::memory_order_relaxed))
#define hb_atomic_int_impl_set(AI, V)           (reinterpret_cast<std::atomic<int> *> (AI)->store ((V), std::memory_order_release))
#define hb_atomic_int_impl_get_relaxed(AI)      (reinterpret_cast<std::atomic<int> const *> (AI)->load (std::memory_order_relaxed))
#define hb_atomic_int_impl_get(AI)              (reinterpret_cast<std::atomic<int> const *> (AI)->load (std::memory_order_acquire))

#define hb_atomic_ptr_impl_set_relaxed(P, V)    (reinterpret_cast<std::atomic<void*> *> (P)->store ((V), std::memory_order_relaxed))
#define hb_atomic_ptr_impl_get_relaxed(P)       (reinterpret_cast<std::atomic<void*> const *> (P)->load (std::memory_order_relaxed))
#define hb_atomic_ptr_impl_get(P)               (reinterpret_cast<std::atomic<void*> *> (P)->load (std::memory_order_acquire))
static inline bool
_hb_atomic_ptr_impl_cmplexch (const void **P, const void *O_, const void *N)
{
  const void *O = O_; // Need lvalue
  return reinterpret_cast<std::atomic<const void*> *> (P)->compare_exchange_weak (O, N, std::memory_order_acq_rel, std::memory_order_relaxed);
}
#define hb_atomic_ptr_impl_cmpexch(P,O,N) _hb_atomic_ptr_impl_cmplexch ((const void **) (P), (O), (N))
#elif !defined(HB_NO_MT) && defined(_WIN32)

#include <windows.h>

static inline void _hb_memory_barrier ()
{
#if !defined(MemoryBarrier) && !defined(__MINGW32_VERSION)
  /* MinGW has a convoluted history of supporting MemoryBarrier. */
  LONG dummy = 0;
  InterlockedExchange (&dummy, 1);
#else
  MemoryBarrier ();
#endif
}
#define _hb_memory_barrier()            _hb_memory_barrier ()

#define hb_atomic_int_impl_add(AI, V)   InterlockedExchangeAdd ((LONG *) (AI), (V))
static_assert ((sizeof (LONG) == sizeof (int)), "");

#define hb_atomic_ptr_impl_cmpexch(P,O,N) (InterlockedCompareExchangePointer ((P), (N), (O)) == (O))
#elif !defined(HB_NO_MT) && defined(HAVE_INTEL_ATOMIC_PRIMITIVES)

#define _hb_memory_barrier()            __sync_synchronize ()

#define hb_atomic_int_impl_add(AI, V)   __sync_fetch_and_add ((AI), (V))

#define hb_atomic_ptr_impl_cmpexch(P,O,N) __sync_bool_compare_and_swap ((P), (O), (N))
#elif !defined(HB_NO_MT) && defined(HAVE_SOLARIS_ATOMIC_OPS)

#include <atomic.h>
#include <mbarrier.h>

#define _hb_memory_r_barrier()          __machine_r_barrier ()
#define _hb_memory_w_barrier()          __machine_w_barrier ()
#define _hb_memory_barrier()            __machine_rw_barrier ()

static inline int _hb_fetch_and_add (int *AI, int V)
{
  _hb_memory_w_barrier ();
  int result = atomic_add_int_nv ((uint_t *) AI, V) - V;
  _hb_memory_r_barrier ();
  return result;
}
static inline bool _hb_compare_and_swap_ptr (void **P, void *O, void *N)
{
  _hb_memory_w_barrier ();
  bool result = atomic_cas_ptr (P, O, N) == O;
  _hb_memory_r_barrier ();
  return result;
}

#define hb_atomic_int_impl_add(AI, V)           _hb_fetch_and_add ((AI), (V))

#define hb_atomic_ptr_impl_cmpexch(P,O,N)       _hb_compare_and_swap_ptr ((P), (O), (N))
#elif !defined(HB_NO_MT) && defined(__APPLE__)

#include <libkern/OSAtomic.h>
#ifdef __MAC_OS_X_MIN_REQUIRED
#include <AvailabilityMacros.h>
#elif defined(__IPHONE_OS_MIN_REQUIRED)
#include <Availability.h>
#endif

#define _hb_memory_barrier()            OSMemoryBarrier ()

#define hb_atomic_int_impl_add(AI, V)   (OSAtomicAdd32Barrier ((V), (AI)) - (V))

#if (MAC_OS_X_VERSION_MIN_REQUIRED > MAC_OS_X_VERSION_10_4 || __IPHONE_VERSION_MIN_REQUIRED >= 20100)
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwapPtrBarrier ((O), (N), (P))
#else
#if __ppc64__ || __x86_64__ || __aarch64__
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap64Barrier ((int64_t) (O), (int64_t) (N), (int64_t*) (P))
#else
#define hb_atomic_ptr_impl_cmpexch(P,O,N) OSAtomicCompareAndSwap32Barrier ((int32_t) (O), (int32_t) (N), (int32_t*) (P))
#endif
#endif
#elif !defined(HB_NO_MT) && defined(_AIX) && (defined(__IBMCPP__) || defined(__ibmxl__))

#include <builtins.h>

#define _hb_memory_barrier()            __lwsync ()

static inline int _hb_fetch_and_add (int *AI, int V)
{
  _hb_memory_barrier ();
  int result = __fetch_and_add (AI, V);
  _hb_memory_barrier ();
  return result;
}
static inline bool _hb_compare_and_swaplp (long *P, long O, long N)
{
  _hb_memory_barrier ();
  bool result = __compare_and_swaplp (P, &O, N);
  _hb_memory_barrier ();
  return result;
}

#define hb_atomic_int_impl_add(AI, V)           _hb_fetch_and_add ((AI), (V))

#define hb_atomic_ptr_impl_cmpexch(P,O,N)       _hb_compare_and_swaplp ((long *) (P), (long) (O), (long) (N))
static_assert ((sizeof (long) == sizeof (void *)), "");
#elif defined(HB_NO_MT)

#define hb_atomic_int_impl_add(AI, V)   ((*(AI) += (V)) - (V))

#define _hb_memory_barrier()            do {} while (0)

#define hb_atomic_ptr_impl_cmpexch(P,O,N) (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)
#else

#error "Could not find any system to define atomic_int macros."
#error "Check hb-atomic.hh for possible resolutions."

#endif

/* Fill in defaults for anything the platform branch above did not provide. */

#ifndef _hb_memory_r_barrier
#define _hb_memory_r_barrier()                  _hb_memory_barrier ()
#endif
#ifndef _hb_memory_w_barrier
#define _hb_memory_w_barrier()                  _hb_memory_barrier ()
#endif
#ifndef hb_atomic_int_impl_set_relaxed
#define hb_atomic_int_impl_set_relaxed(AI, V)   (*(AI) = (V))
#endif
#ifndef hb_atomic_int_impl_get_relaxed
#define hb_atomic_int_impl_get_relaxed(AI)      (*(AI))
#endif

#ifndef hb_atomic_ptr_impl_set_relaxed
#define hb_atomic_ptr_impl_set_relaxed(P, V)    (*(P) = (V))
#endif
#ifndef hb_atomic_ptr_impl_get_relaxed
#define hb_atomic_ptr_impl_get_relaxed(P)       (*(P))
#endif
#ifndef hb_atomic_int_impl_set
inline void hb_atomic_int_impl_set (int *AI, int v)    { _hb_memory_w_barrier (); *AI = v; }
#endif
#ifndef hb_atomic_int_impl_get
inline int hb_atomic_int_impl_get (const int *AI)      { int v = *AI; _hb_memory_r_barrier (); return v; }
#endif
#ifndef hb_atomic_ptr_impl_get
inline void *hb_atomic_ptr_impl_get (void ** const P)  { void *v = *P; _hb_memory_r_barrier (); return v; }
#endif

/* Atomic integer: a thin wrapper over the per-platform impl macros above.
 * inc()/dec() return the value *before* the addition (fetch-and-add semantics). */
#define HB_ATOMIC_INT_INIT(V)          {V}
struct hb_atomic_int_t
{
  void set_relaxed (int v_) { hb_atomic_int_impl_set_relaxed (&v, v_); }
  void set (int v_) { hb_atomic_int_impl_set (&v, v_); }
  int get_relaxed () const { return hb_atomic_int_impl_get_relaxed (&v); }
  int get () const { return hb_atomic_int_impl_get (&v); }
  int inc () { return hb_atomic_int_impl_add (&v,  1); }
  int dec () { return hb_atomic_int_impl_add (&v, -1); }

  int v;
};
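/* Usage sketch (illustrative only; `my_object_t` and its functions are
 * hypothetical, not HarfBuzz API): a reference count shared between threads
 * without an external lock.
 *
 *   struct my_object_t
 *   {
 *     hb_atomic_int_t ref_count;
 *   };
 *
 *   static my_object_t obj = {HB_ATOMIC_INT_INIT (1)};
 *
 *   static void my_object_reference () { obj.ref_count.inc (); }
 *   static void my_object_destroy ()
 *   {
 *     if (obj.ref_count.dec () == 1)
 *       ; // old value was 1, so this dropped the last reference; free here
 *   }
 */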
/* Atomic pointer to T.  cmpexch() is a compare-and-swap on the stored pointer;
 * it may fail spuriously on backends built on compare_exchange_weak, so
 * callers are expected to retry. */
#define HB_ATOMIC_PTR_INIT(V)          {V}
template <typename P>
struct hb_atomic_ptr_t
{
  typedef hb_remove_pointer<P> T;

  void init (T* v_ = nullptr) { set_relaxed (v_); }
  void set_relaxed (T* v_) { hb_atomic_ptr_impl_set_relaxed (&v, v_); }
  T *get_relaxed () const { return (T *) hb_atomic_ptr_impl_get_relaxed (&v); }
  T *get () const { return (T *) hb_atomic_ptr_impl_get ((void **) &v); }
  bool cmpexch (const T *old, T *new_) const { return hb_atomic_ptr_impl_cmpexch ((void **) &v, (void *) old, (void *) new_); }

  T * operator -> () const { return get (); }
  template <typename C> operator C * () const { return get (); }

  T *v;
};
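/* Usage sketch (illustrative only; `my_data_t`, `create_data`, and `free_data`
 * are hypothetical): lazy, thread-safe one-time initialization, the pattern
 * cmpexch() is meant for.  If another thread wins the race, the loser frees
 * its copy and retries.
 *
 *   static hb_atomic_ptr_t<my_data_t> data_ptr;
 *
 *   static my_data_t *get_data ()
 *   {
 *   retry:
 *     my_data_t *data = data_ptr.get ();
 *     if (!data)
 *     {
 *       data = create_data ();
 *       if (!data_ptr.cmpexch (nullptr, data))
 *       {
 *         free_data (data);
 *         goto retry;
 *       }
 *     }
 *     return data;
 *   }
 */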
#endif /* HB_ATOMIC_HH */