1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 // This file is an internal atomic implementation, use base/atomicops.h instead.
7 #ifndef BASE_ATOMICOPS_INTERNALS_MAC_H_
8 #define BASE_ATOMICOPS_INTERNALS_MAC_H_
10 #include <libkern/OSAtomic.h>
// Atomically compare *ptr to old_value and, if equal, store new_value.
// Returns the value of *ptr observed before the operation: old_value on
// success, the differing current value on failure. No memory barrier.
// Retries while *ptr still reads as old_value, since OSAtomicCompareAndSwap32
// only reports success/failure and can fail spuriously under contention.
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32(old_value, new_value,
                                 const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}
// Atomically store new_value into *ptr and return the previous value.
// No memory barrier. Implemented as a CAS loop because libkern provides
// no plain exchange primitive.
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
                                         Atomic32 new_value) {
  Atomic32 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap32(old_value, new_value,
                                     const_cast<Atomic32*>(ptr)));
  return old_value;
}
// Atomically add increment to *ptr and return the new (incremented) value.
// No memory barrier.
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
                                          Atomic32 increment) {
  return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr));
}
// Atomically add increment to *ptr and return the new value, with a full
// memory barrier (OSAtomicAdd32Barrier).
inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
                                        Atomic32 increment) {
  return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
}
49 inline void MemoryBarrier() {
// Compare-and-swap with a memory barrier (OSAtomicCompareAndSwap32Barrier).
// Returns the value of *ptr observed before the operation: old_value on
// success, the differing current value on failure. Retries while *ptr still
// reads as old_value, since the libkern CAS can fail spuriously.
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32Barrier(old_value, new_value,
                                        const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}
// Release-semantics compare-and-swap. The libkern interface does not
// distinguish between Acquire and Release memory barriers, so this simply
// delegates to Acquire_CompareAndSwap (which uses the Barrier variant).
inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}
73 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
77 inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
82 inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
87 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
// Load followed by a full memory barrier, so subsequent reads cannot be
// reordered before this load.
inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}
97 inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
104 // 64-bit implementation on 64-bit platform
// 64-bit compare-and-swap without a memory barrier. Returns the value of
// *ptr observed before the operation: old_value on success, the differing
// current value on failure. Retries while *ptr still reads as old_value,
// since OSAtomicCompareAndSwap64 can fail spuriously under contention.
inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64(old_value, new_value,
                                 reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}
// Atomically store new_value into the 64-bit *ptr and return the previous
// value. No memory barrier; implemented as a CAS loop.
inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
                                         Atomic64 new_value) {
  Atomic64 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap64(old_value, new_value,
                                     reinterpret_cast<volatile int64_t*>(ptr)));
  return old_value;
}
// Atomically add increment to the 64-bit *ptr and return the new value.
// No memory barrier.
inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
                                          Atomic64 increment) {
  return OSAtomicAdd64(increment, reinterpret_cast<volatile int64_t*>(ptr));
}
// Atomically add increment to the 64-bit *ptr and return the new value,
// with a full memory barrier (OSAtomicAdd64Barrier).
inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
                                        Atomic64 increment) {
  return OSAtomicAdd64Barrier(increment,
                              reinterpret_cast<volatile int64_t*>(ptr));
}
// 64-bit compare-and-swap with a memory barrier
// (OSAtomicCompareAndSwap64Barrier). Returns the value of *ptr observed
// before the operation: old_value on success, the differing current value
// on failure. Retries while *ptr still reads as old_value.
inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64Barrier(
        old_value, new_value, reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}
// Release-semantics 64-bit compare-and-swap.
inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  // The lib kern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent.
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}
163 inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
167 inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
172 inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
177 inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
// 64-bit load followed by a full memory barrier, so subsequent reads cannot
// be reordered before this load.
inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
  Atomic64 value = *ptr;
  MemoryBarrier();
  return value;
}
187 inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
192 #endif // defined(__LP64__)
194 } // namespace base::subtle
197 #endif // BASE_ATOMICOPS_INTERNALS_MAC_H_