2 * Distributed under the Boost Software License, Version 1.0.
3 * (See accompanying file LICENSE_1_0.txt or copy at
4 * http://www.boost.org/LICENSE_1_0.txt)
6 * Copyright (c) 2009 Helge Bahmann
7 * Copyright (c) 2012 Tim Blechmann
8 * Copyright (c) 2014 Andrey Semashev
11 * \file atomic/detail/ops_msvc_x86.hpp
13 * This header contains implementation of the \c operations template.
16 #ifndef BOOST_ATOMIC_DETAIL_OPS_MSVC_X86_HPP_INCLUDED_
17 #define BOOST_ATOMIC_DETAIL_OPS_MSVC_X86_HPP_INCLUDED_
19 #include <boost/memory_order.hpp>
20 #include <boost/type_traits/make_signed.hpp>
21 #include <boost/atomic/detail/config.hpp>
22 #include <boost/atomic/detail/interlocked.hpp>
23 #include <boost/atomic/detail/storage_type.hpp>
24 #include <boost/atomic/detail/operations_fwd.hpp>
25 #include <boost/atomic/capabilities.hpp>
26 #if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG8B) || defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)
27 #include <boost/cstdint.hpp>
28 #include <boost/atomic/detail/ops_cas_based.hpp>
30 #include <boost/atomic/detail/ops_msvc_common.hpp>
31 #if !defined(_M_IX86) && !(defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8) && defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16))
32 #include <boost/atomic/detail/ops_extending_cas_based.hpp>
35 #ifdef BOOST_HAS_PRAGMA_ONCE
39 #if defined(BOOST_MSVC)
41 // frame pointer register 'ebx' modified by inline assembly code. See the note below.
42 #pragma warning(disable: 4731)
45 #if defined(_MSC_VER) && (defined(_M_AMD64) || (defined(_M_IX86) && defined(_M_IX86_FP) && _M_IX86_FP >= 2))
46 extern "C" void _mm_mfence(void);
47 #if defined(BOOST_MSVC)
48 #pragma intrinsic(_mm_mfence)
57 * Implementation note for asm blocks.
59 * http://msdn.microsoft.com/en-us/data/k1a8ss06%28v=vs.105%29
61 * Some SSE types require eight-byte stack alignment, forcing the compiler to emit dynamic stack-alignment code.
62 * To be able to access both the local variables and the function parameters after the alignment, the compiler
63 * maintains two frame pointers. If the compiler performs frame pointer omission (FPO), it will use EBP and ESP.
64 * If the compiler does not perform FPO, it will use EBX and EBP. To ensure code runs correctly, do not modify EBX
65 * in asm code if the function requires dynamic stack alignment as it could modify the frame pointer.
66 * Either move the eight-byte aligned types out of the function, or avoid using EBX.
68 * Since we have no way of knowing that the compiler uses FPO, we have to always save and restore ebx
69 * whenever we have to clobber it. Additionally, we disable warning C4731 above so that the compiler
70 * doesn't spam about ebx use.
// Common base for all MSVC x86/x86_64 atomic operation implementations in this
// header. Provides the hardware full fence plus the compiler-only fences the
// size-specific operation structs wrap around their primitives.
// NOTE(review): the embedded original line numbers are non-contiguous — some
// statements/braces of this extract are not visible here.
73 struct msvc_x86_operations_base
// Full memory barrier: mfence when SSE2 is guaranteed by the target
// (x86_64, or 32-bit x86 compiled with /arch:SSE2 or higher); otherwise a
// dummy lock-prefixed interlocked exchange, which has the same ordering
// effect on x86.
75 static BOOST_FORCEINLINE void hardware_full_fence() BOOST_NOEXCEPT
77 #if defined(_MSC_VER) && (defined(_M_AMD64) || (defined(_M_IX86) && defined(_M_IX86_FP) && _M_IX86_FP >= 2))
78 // Use mfence only if SSE2 is available
// Fallback path: locked RMW on a local dummy acts as a full fence.
82 BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&tmp, 0);
// The fences below are compiler barriers only: on x86's strong memory model
// the lock-prefixed operations themselves provide the hardware ordering, so
// we only need to prevent the compiler from reordering across them.
86 static BOOST_FORCEINLINE void fence_before(memory_order) BOOST_NOEXCEPT
88 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
91 static BOOST_FORCEINLINE void fence_after(memory_order) BOOST_NOEXCEPT
93 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
// Fence emitted after a plain load; see the rationale below for why no
// hardware barrier is required even for seq_cst loads.
96 static BOOST_FORCEINLINE void fence_after_load(memory_order) BOOST_NOEXCEPT
98 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
100 // On x86 and x86_64 there is no need for a hardware barrier,
101 // even if seq_cst memory order is requested, because all
102 // seq_cst writes are implemented with lock-prefixed operations
103 // or xchg which has implied lock prefix. Therefore normal loads
104 // are already ordered with seq_cst stores on these architectures.
// CRTP base template: implements the generic atomic operations (store, load,
// fetch_sub, weak CAS, test_and_set, clear) in terms of the primitives
// (exchange, fetch_add, compare_exchange_strong) supplied by the Derived
// size-specific specialization.
// NOTE(review): the embedded original line numbers are non-contiguous — some
// statements/braces of this extract are not visible here.
108 template< typename T, typename Derived >
109 struct msvc_x86_operations :
110 public msvc_x86_operations_base
112 typedef T storage_type;
// Non-seq_cst stores are plain stores bracketed by compiler barriers
// (handled in the branch not visible here); seq_cst stores fall through to
// an exchange, whose implicit lock prefix gives the required ordering.
114 static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
116 if (order != memory_order_seq_cst)
124 Derived::exchange(storage, v, order);
// Plain load followed by a compiler barrier is sufficient on x86; see the
// comment in msvc_x86_operations_base::fence_after_load.
128 static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
130 storage_type v = storage;
131 fence_after_load(order);
// fetch_sub(v) == fetch_add(-v). The negation is done on the signed
// counterpart type and cast back, yielding two's-complement negation
// without relying on unsigned arithmetic quirks.
135 static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
137 typedef typename make_signed< storage_type >::type signed_storage_type;
138 return Derived::fetch_add(storage, static_cast< storage_type >(-static_cast< signed_storage_type >(v)), order);
// x86 CAS never fails spuriously, so weak CAS simply forwards to strong.
141 static BOOST_FORCEINLINE bool compare_exchange_weak(
142 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
144 return Derived::compare_exchange_strong(storage, expected, desired, success_order, failure_order);
// test_and_set: exchange in 1, report whether the previous value was set.
147 static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
149 return !!Derived::exchange(storage, (storage_type)1, order);
152 static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
154 store(storage, (storage_type)0, order);
// All specializations in this header are lock-free.
157 static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
// 32-bit operations: implemented directly with the compiler's Interlocked*
// intrinsics (wrapped by the BOOST_ATOMIC_INTERLOCKED_* macros). These are
// available on all MSVC targets, so this specialization is unconditional.
// NOTE(review): the embedded original line numbers are non-contiguous — some
// statements/braces of this extract are not visible here.
163 template< bool Signed >
164 struct operations< 4u, Signed > :
165 public msvc_x86_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > >
167 typedef msvc_x86_operations< typename make_storage_type< 4u, Signed >::type, operations< 4u, Signed > > base_type;
168 typedef typename base_type::storage_type storage_type;
// The memory_order argument is ignored: the interlocked intrinsics are
// always sequentially consistent on x86.
170 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
172 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&storage, v))
175 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
177 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&storage, v));
// Strong CAS: success is detected by comparing the intrinsic's returned
// old value with the caller's expected value; expected is updated with the
// old value in the part of the body not visible in this extract.
180 static BOOST_FORCEINLINE bool compare_exchange_strong(
181 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
183 storage_type previous = expected;
184 storage_type old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&storage, desired, previous));
186 return (previous == old_val);
// Bitwise RMW ops: use the dedicated interlocked intrinsic when the
// platform provides it, otherwise emulate with a CAS loop.
189 #if defined(BOOST_ATOMIC_INTERLOCKED_AND)
190 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
192 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND(&storage, v));
// CAS-loop fallback: compare_exchange_strong refreshes res on failure.
195 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
197 storage_type res = storage;
198 while (!compare_exchange_strong(storage, res, res & v, order, memory_order_relaxed)) {}
203 #if defined(BOOST_ATOMIC_INTERLOCKED_OR)
204 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
206 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR(&storage, v));
209 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
211 storage_type res = storage;
212 while (!compare_exchange_strong(storage, res, res | v, order, memory_order_relaxed)) {}
217 #if defined(BOOST_ATOMIC_INTERLOCKED_XOR)
218 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
220 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&storage, v));
223 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
225 storage_type res = storage;
226 while (!compare_exchange_strong(storage, res, res ^ v, order, memory_order_relaxed)) {}
232 #if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8)
// 8-bit operations via the 8-bit Interlocked* intrinsics, used when the
// toolchain provides _InterlockedCompareExchange8 and friends (guarded by the
// #if on the preceding line). Mirrors the 32-bit specialization above.
// NOTE(review): the embedded original line numbers are non-contiguous — some
// statements/braces of this extract are not visible here.
234 template< bool Signed >
235 struct operations< 1u, Signed > :
236 public msvc_x86_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
238 typedef msvc_x86_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
239 typedef typename base_type::storage_type storage_type;
// memory_order is ignored — interlocked intrinsics are seq_cst on x86.
241 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
243 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&storage, v));
246 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
248 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&storage, v));
// Success is detected by comparing the returned old value with expected.
251 static BOOST_FORCEINLINE bool compare_exchange_strong(
252 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
254 storage_type previous = expected;
255 storage_type old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&storage, desired, previous));
257 return (previous == old_val);
260 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
262 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&storage, v));
265 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
267 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&storage, v));
270 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
272 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&storage, v));
276 #elif defined(_M_IX86)
// 8-bit operations via 32-bit-x86 inline assembly, used when the 8-bit
// interlocked intrinsics are unavailable (the #elif defined(_M_IX86) branch).
// Each operation is bracketed by the base class's compiler-barrier fences;
// the lock prefix (implicit for xchg) supplies the hardware ordering.
// NOTE(review): the embedded original line numbers are non-contiguous — the
// asm register-setup/loop instructions between the visible lines are omitted
// from this extract.
278 template< bool Signed >
279 struct operations< 1u, Signed > :
280 public msvc_x86_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > >
282 typedef msvc_x86_operations< typename make_storage_type< 1u, Signed >::type, operations< 1u, Signed > > base_type;
283 typedef typename base_type::storage_type storage_type;
// fetch_add: lock xadd atomically adds al into [edx] and leaves the old
// value in al.
285 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
287 base_type::fence_before(order);
292 lock xadd byte ptr [edx], al
295 base_type::fence_after(order);
// exchange: xchg with a memory operand has an implicit lock prefix.
299 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
301 base_type::fence_before(order);
306 xchg byte ptr [edx], al
309 base_type::fence_after(order);
// CAS: load expected from [esi] into al, cmpxchg against [edi], then write
// the observed value back through expected so the caller sees it on failure.
313 static BOOST_FORCEINLINE bool compare_exchange_strong(
314 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order) BOOST_NOEXCEPT
316 base_type::fence_before(success_order);
322 movzx eax, byte ptr [esi]
324 lock cmpxchg byte ptr [edi], dl
325 mov byte ptr [esi], al
328 // The success and failure fences are equivalent anyway
329 base_type::fence_after(success_order);
// The bitwise fetch-ops below are cmpxchg retry loops: read the current
// value, compute the new value into dl (computation lines omitted in this
// extract), and lock cmpxchg until it sticks.
333 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
335 base_type::fence_before(order);
343 movzx eax, byte ptr [edi]
348 lock cmpxchg byte ptr [edi], dl
353 base_type::fence_after(order);
357 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
359 base_type::fence_before(order);
367 movzx eax, byte ptr [edi]
372 lock cmpxchg byte ptr [edi], dl
377 base_type::fence_after(order);
381 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
383 base_type::fence_before(order);
391 movzx eax, byte ptr [edi]
396 lock cmpxchg byte ptr [edi], dl
401 base_type::fence_after(order);
// Fallback for non-x86 targets without 8-bit intrinsics: emulate 8-bit
// atomics on top of the 32-bit CAS via extending_cas_based_operations.
408 template< bool Signed >
409 struct operations< 1u, Signed > :
410 public extending_cas_based_operations< operations< 4u, Signed >, 1u, Signed >
416 #if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16)
// 16-bit operations via the 16-bit Interlocked* intrinsics, used when the
// toolchain provides _InterlockedCompareExchange16 and friends. Mirrors the
// 32-bit specialization above.
// NOTE(review): the embedded original line numbers are non-contiguous — some
// statements/braces of this extract are not visible here.
418 template< bool Signed >
419 struct operations< 2u, Signed > :
420 public msvc_x86_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
422 typedef msvc_x86_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
423 typedef typename base_type::storage_type storage_type;
// memory_order is ignored — interlocked intrinsics are seq_cst on x86.
425 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
427 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&storage, v));
430 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
432 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&storage, v));
// Success is detected by comparing the returned old value with expected.
435 static BOOST_FORCEINLINE bool compare_exchange_strong(
436 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
438 storage_type previous = expected;
439 storage_type old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&storage, desired, previous));
441 return (previous == old_val);
444 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
446 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&storage, v));
449 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
451 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&storage, v));
454 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
456 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&storage, v));
460 #elif defined(_M_IX86)
// 16-bit operations via 32-bit-x86 inline assembly, used when the 16-bit
// interlocked intrinsics are unavailable. Structurally identical to the 8-bit
// asm specialization, with word-sized operands (ax/dx).
// NOTE(review): the embedded original line numbers are non-contiguous — the
// asm register-setup/loop instructions between the visible lines are omitted
// from this extract.
462 template< bool Signed >
463 struct operations< 2u, Signed > :
464 public msvc_x86_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > >
466 typedef msvc_x86_operations< typename make_storage_type< 2u, Signed >::type, operations< 2u, Signed > > base_type;
467 typedef typename base_type::storage_type storage_type;
// fetch_add: lock xadd atomically adds ax into [edx], old value left in ax.
469 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
471 base_type::fence_before(order);
476 lock xadd word ptr [edx], ax
479 base_type::fence_after(order);
// exchange: xchg with a memory operand has an implicit lock prefix.
483 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
485 base_type::fence_before(order);
490 xchg word ptr [edx], ax
493 base_type::fence_after(order);
// CAS: load expected from [esi] into ax, cmpxchg against [edi], then write
// the observed value back through expected so the caller sees it on failure.
497 static BOOST_FORCEINLINE bool compare_exchange_strong(
498 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order) BOOST_NOEXCEPT
500 base_type::fence_before(success_order);
506 movzx eax, word ptr [esi]
508 lock cmpxchg word ptr [edi], dx
509 mov word ptr [esi], ax
512 // The success and failure fences are equivalent anyway
513 base_type::fence_after(success_order);
// The bitwise fetch-ops below are cmpxchg retry loops: read the current
// value, compute the new value into dx (computation lines omitted in this
// extract), and lock cmpxchg until it sticks.
517 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
519 base_type::fence_before(order);
527 movzx eax, word ptr [edi]
532 lock cmpxchg word ptr [edi], dx
537 base_type::fence_after(order);
541 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
543 base_type::fence_before(order);
551 movzx eax, word ptr [edi]
556 lock cmpxchg word ptr [edi], dx
561 base_type::fence_after(order);
565 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
567 base_type::fence_before(order);
575 movzx eax, word ptr [edi]
580 lock cmpxchg word ptr [edi], dx
585 base_type::fence_after(order);
// Fallback for non-x86 targets without 16-bit intrinsics: emulate 16-bit
// atomics on top of the 32-bit CAS via extending_cas_based_operations.
592 template< bool Signed >
593 struct operations< 2u, Signed > :
594 public extending_cas_based_operations< operations< 4u, Signed >, 2u, Signed >
601 #if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG8B)
// 64-bit double-width CAS backend for 32-bit x86, built on cmpxchg8b (and,
// where available, SSE/x87 8-byte moves for aligned load/store). Plugged into
// cas_based_operations below to form operations< 8u >.
// NOTE(review): the embedded original line numbers are non-contiguous — the
// struct header, several asm lines and braces of this extract are omitted.
603 template< bool Signed >
606 typedef typename make_storage_type< 8u, Signed >::type storage_type;
608 // Intel 64 and IA-32 Architectures Software Developer's Manual, Volume 3A, 8.1.1. Guaranteed Atomic Operations:
610 // The Pentium processor (and newer processors since) guarantees that the following additional memory operations will always be carried out atomically:
611 // * Reading or writing a quadword aligned on a 64-bit boundary
613 // Luckily, the memory is almost always 8-byte aligned in our case because atomic<> uses 64 bit native types for storage and dynamic memory allocations
614 // have at least 8 byte alignment. The only unfortunate case is when atomic is placed on the stack and it is not 8-byte aligned (like on 32 bit Windows).
// store: if the address is 8-byte aligned, a single 8-byte move (AVX vmovq,
// SSE2 movq, or x87 fistp depending on target) is atomic per the SDM quote
// above; otherwise fall back to a cmpxchg8b retry loop.
616 static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
618 storage_type volatile* p = &storage;
619 if (((uint32_t)p & 0x00000007) == 0)
621 #if defined(_M_IX86_FP) && _M_IX86_FP >= 2
627 vmovq qword ptr [edx], xmm4
634 movq qword ptr [edx], xmm4
642 fistp qword ptr [edx]
// Unaligned path: ebx:ecx holds the new value, eax:edx the last observed
// value; loop on lock cmpxchg8b until the store succeeds. ebx is saved and
// restored around this block (see the frame-pointer note at the top of the
// file) — those instructions are omitted from this extract.
653 mov ebx, dword ptr [v]
654 mov ecx, dword ptr [v + 4]
655 mov eax, dword ptr [edi]
656 mov edx, dword ptr [edi + 4]
659 lock cmpxchg8b qword ptr [edi]
// load: same split — atomic 8-byte move when aligned, cmpxchg8b otherwise.
666 static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order) BOOST_NOEXCEPT
668 storage_type const volatile* p = &storage;
671 if (((uint32_t)p & 0x00000007) == 0)
673 #if defined(_M_IX86_FP) && _M_IX86_FP >= 2
678 vmovq xmm4, qword ptr [edx]
685 movq xmm4, qword ptr [edx]
700 // We don't care for comparison result here; the previous value will be stored into value anyway.
701 // Also we don't care for ebx and ecx values, they just have to be equal to eax and edx before cmpxchg8b.
707 lock cmpxchg8b qword ptr [edi]
708 mov dword ptr [value], eax
709 mov dword ptr [value + 4], edx
// CAS: prefer the InterlockedCompareExchange64 intrinsic; otherwise do the
// cmpxchg8b by hand, writing the observed value back through expected.
716 static BOOST_FORCEINLINE bool compare_exchange_strong(
717 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
719 storage_type volatile* p = &storage;
720 #if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64)
721 const storage_type old_val = (storage_type)BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(p, desired, expected);
722 const bool result = (old_val == expected);
733 mov ebx, dword ptr [desired]
734 mov ecx, dword ptr [desired + 4]
735 mov eax, dword ptr [esi]
736 mov edx, dword ptr [esi + 4]
737 lock cmpxchg8b qword ptr [edi]
738 mov dword ptr [esi], eax
739 mov dword ptr [esi + 4], edx
// cmpxchg8b never fails spuriously, so weak CAS forwards to strong.
747 static BOOST_FORCEINLINE bool compare_exchange_weak(
748 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
750 return compare_exchange_strong(storage, expected, desired, success_order, failure_order);
753 static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
// 64-bit operations on 32-bit x86: all RMW ops synthesized from the
// msvc_dcas_x86 CAS backend by cas_based_operations.
759 template< bool Signed >
760 struct operations< 8u, Signed > :
761 public cas_based_operations< msvc_dcas_x86< Signed > >
765 #elif defined(_M_AMD64)
// 64-bit operations on x86_64: native 64-bit Interlocked* intrinsics.
// Mirrors the 32-bit interlocked specialization above.
// NOTE(review): the embedded original line numbers are non-contiguous — some
// statements/braces of this extract are not visible here.
767 template< bool Signed >
768 struct operations< 8u, Signed > :
769 public msvc_x86_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > >
771 typedef msvc_x86_operations< typename make_storage_type< 8u, Signed >::type, operations< 8u, Signed > > base_type;
772 typedef typename base_type::storage_type storage_type;
// memory_order is ignored — interlocked intrinsics are seq_cst on x86.
774 static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
776 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&storage, v));
779 static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
781 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&storage, v));
// Success is detected by comparing the returned old value with expected.
784 static BOOST_FORCEINLINE bool compare_exchange_strong(
785 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
787 storage_type previous = expected;
788 storage_type old_val = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&storage, desired, previous));
790 return (previous == old_val);
793 static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
795 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&storage, v));
798 static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
800 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&storage, v));
803 static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
805 return static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&storage, v));
811 #if defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)
// 128-bit double-width CAS backend for x86_64, built entirely on the
// InterlockedCompareExchange128 intrinsic (cmpxchg16b). Plugged into
// cas_based_operations below to form operations< 16u >.
// NOTE(review): the embedded original line numbers are non-contiguous — some
// statements/braces of this extract are not visible here.
813 template< bool Signed >
814 struct msvc_dcas_x86_64
816 typedef typename make_storage_type< 16u, Signed >::type storage_type;
// store: CAS loop — read the current value non-atomically as the first
// guess, then retry cmpxchg16b (which refreshes 'value' on failure) until
// the new value is installed.
818 static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
820 storage_type value = const_cast< storage_type& >(storage);
821 while (!BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE128(&storage, v, &value)) {}
// load: a cmpxchg16b with expected == desired == 0 never changes memory but
// always writes the current contents into 'value'.
824 static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order) BOOST_NOEXCEPT
826 storage_type value = storage_type();
827 BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE128(&storage, value, &value);
// The intrinsic updates 'expected' with the observed value on failure and
// returns the comparison result directly.
831 static BOOST_FORCEINLINE bool compare_exchange_strong(
832 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order, memory_order) BOOST_NOEXCEPT
834 return !!BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE128(&storage, desired, &expected);
// cmpxchg16b never fails spuriously, so weak CAS forwards to strong.
837 static BOOST_FORCEINLINE bool compare_exchange_weak(
838 storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
840 return compare_exchange_strong(storage, expected, desired, success_order, failure_order);
843 static BOOST_FORCEINLINE bool is_lock_free(storage_type const volatile&) BOOST_NOEXCEPT
// 128-bit operations on x86_64: all RMW ops synthesized from the
// msvc_dcas_x86_64 CAS backend by cas_based_operations.
849 template< bool Signed >
850 struct operations< 16u, Signed > :
851 public cas_based_operations< msvc_dcas_x86_64< Signed > >
855 #endif // defined(BOOST_ATOMIC_DETAIL_X86_HAS_CMPXCHG16B)
// Free-standing thread fence: a compiler barrier is sufficient for every
// ordering except seq_cst, which additionally needs a hardware full fence
// (mfence or a locked RMW — see hardware_full_fence above). Barriers on both
// sides keep the compiler from sinking/hoisting code across the fence.
857 BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
859 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
860 if (order == memory_order_seq_cst)
861 msvc_x86_operations_base::hardware_full_fence();
862 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
// Signal fence only needs to stop compiler reordering (same thread, so no
// hardware barrier); relaxed order requires nothing at all.
865 BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
867 if (order != memory_order_relaxed)
868 BOOST_ATOMIC_DETAIL_COMPILER_BARRIER();
871 } // namespace detail
872 } // namespace atomics
875 #if defined(BOOST_MSVC)
879 #endif // BOOST_ATOMIC_DETAIL_OPS_MSVC_X86_HPP_INCLUDED_