// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is an internal atomic implementation, use base/atomicops.h instead.

#ifndef BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_
#define BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_

#include <libkern/OSAtomic.h>

namespace base {
namespace subtle {
16 inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
21 if (OSAtomicCompareAndSwap32(old_value, new_value,
22 const_cast<Atomic32*>(ptr))) {
26 } while (prev_value == old_value);
30 inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
35 } while (!OSAtomicCompareAndSwap32(old_value, new_value,
36 const_cast<Atomic32*>(ptr)));
40 inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
42 return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr));
45 inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
47 return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
50 inline void MemoryBarrier() {
54 inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
59 if (OSAtomicCompareAndSwap32Barrier(old_value, new_value,
60 const_cast<Atomic32*>(ptr))) {
64 } while (prev_value == old_value);
68 inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
71 return Acquire_CompareAndSwap(ptr, old_value, new_value);
74 inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
78 inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
83 inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
88 inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
92 inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
93 Atomic32 value = *ptr;
98 inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
#ifdef __LP64__

// 64-bit implementation on 64-bit platform
107 inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
109 Atomic64 new_value) {
112 if (OSAtomicCompareAndSwap64(old_value, new_value,
113 reinterpret_cast<volatile int64_t*>(ptr))) {
117 } while (prev_value == old_value);
121 inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
122 Atomic64 new_value) {
126 } while (!OSAtomicCompareAndSwap64(old_value, new_value,
127 reinterpret_cast<volatile int64_t*>(ptr)));
131 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
132 Atomic64 increment) {
133 return OSAtomicAdd64(increment, reinterpret_cast<volatile int64_t*>(ptr));
136 inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
137 Atomic64 increment) {
138 return OSAtomicAdd64Barrier(increment,
139 reinterpret_cast<volatile int64_t*>(ptr));
142 inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
144 Atomic64 new_value) {
147 if (OSAtomicCompareAndSwap64Barrier(
148 old_value, new_value, reinterpret_cast<volatile int64_t*>(ptr))) {
152 } while (prev_value == old_value);
156 inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
158 Atomic64 new_value) {
159 // The lib kern interface does not distinguish between
160 // Acquire and Release memory barriers; they are equivalent.
161 return Acquire_CompareAndSwap(ptr, old_value, new_value);
164 inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
168 inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
173 inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
178 inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
182 inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
183 Atomic64 value = *ptr;
188 inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
#endif  // defined(__LP64__)

}   // namespace base::subtle
}   // namespace base

#endif  // BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_