1 // atomic standard header
\r
9 #error <atomic> is not supported when compiling with /clr:pure.
\r
10 #endif /* _M_CEE_PURE */
\r
12 #include <type_traits>
\r
19 #include <xatomic.h>
\r
21 #pragma pack(push,_CRT_PACKING)
\r
22 #pragma warning(push,_STL_WARNING_LEVEL)
\r
23 #pragma warning(disable: _STL_DISABLED_WARNINGS)
\r
24 #pragma push_macro("new")
\r
27 #pragma warning(disable: 4522) // multiple assignment operators specified
\r
29 // LOCK-FREE PROPERTY
\r
30 #define ATOMIC_BOOL_LOCK_FREE 2
\r
31 #define ATOMIC_CHAR_LOCK_FREE 2
\r
32 #define ATOMIC_CHAR16_T_LOCK_FREE 2
\r
33 #define ATOMIC_CHAR32_T_LOCK_FREE 2
\r
34 #define ATOMIC_WCHAR_T_LOCK_FREE 2
\r
35 #define ATOMIC_SHORT_LOCK_FREE 2
\r
36 #define ATOMIC_INT_LOCK_FREE 2
\r
37 #define ATOMIC_LONG_LOCK_FREE 2
\r
38 #define ATOMIC_LLONG_LOCK_FREE 2
\r
39 #define ATOMIC_POINTER_LOCK_FREE 2
\r
42 inline memory_order _Get_memory_order(memory_order _Order)
\r
43 { // get second memory_order argument for cas functions
\r
44 // that take only one memory_order argument
\r
45 return (_Order == memory_order_acq_rel ? memory_order_acquire
\r
46 : _Order == memory_order_release ? memory_order_relaxed
\r
50 // TEMPLATE FUNCTION kill_dependency
\r
52 _Ty kill_dependency(_Ty _Arg) _NOEXCEPT
\r
53 { // magic template that kills dependency ordering when called
\r
57 // GENERAL OPERATIONS ON ATOMIC TYPES (FORWARD DECLARATIONS)
\r
58 template <class _Ty>
\r
60 template <class _Ty>
\r
61 bool atomic_is_lock_free(const volatile atomic<_Ty> *) throw(); // TRANSITION, VSO#174686
\r
62 template <class _Ty>
\r
63 bool atomic_is_lock_free(const atomic<_Ty> *) throw(); // TRANSITION, VSO#174686
\r
64 template <class _Ty>
\r
65 void atomic_init(volatile atomic<_Ty> *, _Ty) throw(); // TRANSITION, VSO#174686
\r
66 template <class _Ty>
\r
67 void atomic_init(atomic<_Ty> *, _Ty) throw(); // TRANSITION, VSO#174686
\r
68 template <class _Ty>
\r
69 void atomic_store(volatile atomic<_Ty> *, _Ty) throw(); // TRANSITION, VSO#174686
\r
70 template <class _Ty>
\r
71 void atomic_store(atomic<_Ty> *, _Ty) throw(); // TRANSITION, VSO#174686
\r
72 template <class _Ty>
\r
73 void atomic_store_explicit(volatile atomic<_Ty> *, _Ty,
\r
74 memory_order) throw(); // TRANSITION, VSO#174686
\r
75 template <class _Ty>
\r
76 void atomic_store_explicit(atomic<_Ty> *, _Ty,
\r
77 memory_order) throw(); // TRANSITION, VSO#174686
\r
78 template <class _Ty>
\r
79 _Ty atomic_load(const volatile atomic<_Ty> *) throw(); // TRANSITION, VSO#174686
\r
80 template <class _Ty>
\r
81 _Ty atomic_load(const atomic<_Ty> *) throw(); // TRANSITION, VSO#174686
\r
82 template <class _Ty>
\r
83 _Ty atomic_load_explicit(const volatile atomic<_Ty> *,
\r
84 memory_order) throw(); // TRANSITION, VSO#174686
\r
85 template <class _Ty>
\r
86 _Ty atomic_load_explicit(const atomic<_Ty> *,
\r
87 memory_order) throw(); // TRANSITION, VSO#174686
\r
88 template <class _Ty>
\r
89 _Ty atomic_exchange(volatile atomic<_Ty> *, _Ty) throw(); // TRANSITION, VSO#174686
\r
90 template <class _Ty>
\r
91 _Ty atomic_exchange(atomic<_Ty> *, _Ty) throw(); // TRANSITION, VSO#174686
\r
92 template <class _Ty>
\r
93 _Ty atomic_exchange_explicit(volatile atomic<_Ty> *, _Ty,
\r
94 memory_order) throw(); // TRANSITION, VSO#174686
\r
95 template <class _Ty>
\r
96 _Ty atomic_exchange_explicit(atomic<_Ty> *, _Ty,
\r
97 memory_order) throw(); // TRANSITION, VSO#174686
\r
98 template <class _Ty>
\r
99 bool atomic_compare_exchange_weak(volatile atomic<_Ty> *,
\r
100 _Ty *, _Ty) throw(); // TRANSITION, VSO#174686
\r
101 template <class _Ty>
\r
102 bool atomic_compare_exchange_weak(atomic<_Ty> *,
\r
103 _Ty *, _Ty) throw(); // TRANSITION, VSO#174686
\r
104 template <class _Ty>
\r
105 bool atomic_compare_exchange_weak_explicit(
\r
106 volatile atomic<_Ty> *, _Ty *, _Ty,
\r
107 memory_order, memory_order) throw(); // TRANSITION, VSO#174686
\r
108 template <class _Ty>
\r
109 bool atomic_compare_exchange_weak_explicit(
\r
110 atomic<_Ty> *, _Ty *, _Ty,
\r
111 memory_order, memory_order) throw(); // TRANSITION, VSO#174686
\r
112 template <class _Ty>
\r
113 bool atomic_compare_exchange_strong(volatile atomic<_Ty> *,
\r
114 _Ty *, _Ty) throw(); // TRANSITION, VSO#174686
\r
115 template <class _Ty>
\r
116 bool atomic_compare_exchange_strong(atomic<_Ty> *,
\r
117 _Ty *, _Ty) throw(); // TRANSITION, VSO#174686
\r
118 template <class _Ty>
\r
119 bool atomic_compare_exchange_strong_explicit(
\r
120 volatile atomic<_Ty> *, _Ty *, _Ty,
\r
121 memory_order, memory_order) throw(); // TRANSITION, VSO#174686
\r
122 template <class _Ty>
\r
123 bool atomic_compare_exchange_strong_explicit(
\r
124 atomic<_Ty> *, _Ty *, _Ty,
\r
125 memory_order, memory_order) throw(); // TRANSITION, VSO#174686
\r
127 // TEMPLATED OPERATIONS ON ATOMIC TYPES (DECLARED BUT NOT DEFINED)
\r
128 template <class _Ty>
\r
129 _Ty atomic_fetch_add(volatile atomic<_Ty>*, _Ty) throw(); // TRANSITION, VSO#174686
\r
130 template <class _Ty>
\r
131 _Ty atomic_fetch_add(atomic<_Ty>*, _Ty) throw(); // TRANSITION, VSO#174686
\r
132 template <class _Ty>
\r
133 _Ty atomic_fetch_add_explicit(volatile atomic<_Ty>*, _Ty,
\r
134 memory_order) throw(); // TRANSITION, VSO#174686
\r
135 template <class _Ty>
\r
136 _Ty atomic_fetch_add_explicit(atomic<_Ty>*, _Ty,
\r
137 memory_order) throw(); // TRANSITION, VSO#174686
\r
138 template <class _Ty>
\r
139 _Ty atomic_fetch_sub(volatile atomic<_Ty>*, _Ty) throw(); // TRANSITION, VSO#174686
\r
140 template <class _Ty>
\r
141 _Ty atomic_fetch_sub(atomic<_Ty>*, _Ty) throw(); // TRANSITION, VSO#174686
\r
142 template <class _Ty>
\r
143 _Ty atomic_fetch_sub_explicit(volatile atomic<_Ty>*, _Ty,
\r
144 memory_order) throw(); // TRANSITION, VSO#174686
\r
145 template <class _Ty>
\r
146 _Ty atomic_fetch_sub_explicit(atomic<_Ty>*, _Ty,
\r
147 memory_order) throw(); // TRANSITION, VSO#174686
\r
148 template <class _Ty>
\r
149 _Ty atomic_fetch_and(volatile atomic<_Ty>*, _Ty) throw(); // TRANSITION, VSO#174686
\r
150 template <class _Ty>
\r
151 _Ty atomic_fetch_and(atomic<_Ty>*, _Ty) throw(); // TRANSITION, VSO#174686
\r
152 template <class _Ty>
\r
153 _Ty atomic_fetch_and_explicit(volatile atomic<_Ty>*, _Ty,
\r
154 memory_order) throw(); // TRANSITION, VSO#174686
\r
155 template <class _Ty>
\r
156 _Ty atomic_fetch_and_explicit(atomic<_Ty>*, _Ty,
\r
157 memory_order) throw(); // TRANSITION, VSO#174686
\r
158 template <class _Ty>
\r
159 _Ty __cdecl atomic_fetch_or( volatile atomic<_Ty>* p1, _Ty p2 ) throw( ) // TRANSITION, VSO#174686
\r
162 *p1 = (_Ty)((long)*p1 | (long)p2);
\r
166 template <class _Ty>
\r
167 _Ty __cdecl atomic_fetch_or(atomic<_Ty>*, _Ty) throw(); // TRANSITION, VSO#174686
\r
168 template <class _Ty>
\r
169 _Ty atomic_fetch_or_explicit(volatile atomic<_Ty>*, _Ty,
\r
170 memory_order) throw(); // TRANSITION, VSO#174686
\r
171 template <class _Ty>
\r
172 _Ty atomic_fetch_or_explicit(atomic<_Ty>*, _Ty,
\r
173 memory_order) throw(); // TRANSITION, VSO#174686
\r
174 template <class _Ty>
\r
175 _Ty atomic_fetch_xor(volatile atomic<_Ty>*, _Ty) throw(); // TRANSITION, VSO#174686
\r
176 template <class _Ty>
\r
177 _Ty atomic_fetch_xor(atomic<_Ty>*, _Ty) throw(); // TRANSITION, VSO#174686
\r
178 template <class _Ty>
\r
179 _Ty atomic_fetch_xor_explicit(volatile atomic<_Ty>*, _Ty,
\r
180 memory_order) throw(); // TRANSITION, VSO#174686
\r
181 template <class _Ty>
\r
182 _Ty atomic_fetch_xor_explicit(atomic<_Ty>*, _Ty,
\r
183 memory_order) throw(); // TRANSITION, VSO#174686
\r
185 // STRUCT atomic_flag
\r
186 #define ATOMIC_FLAG_INIT {0}
\r
187 typedef struct atomic_flag
\r
188 { // structure for managing flag with test-and-set semantics
\r
189 bool test_and_set(memory_order _Order = memory_order_seq_cst)
\r
190 volatile throw(); // TRANSITION, VSO#174686
\r
191 bool test_and_set(memory_order _Order = memory_order_seq_cst) throw(); // TRANSITION, VSO#174686
\r
192 void clear(memory_order _Order = memory_order_seq_cst)
\r
193 volatile throw(); // TRANSITION, VSO#174686
\r
194 void clear(memory_order _Order = memory_order_seq_cst) throw(); // TRANSITION, VSO#174686
\r
196 _Atomic_flag_t _My_flag;
\r
198 atomic_flag() _NOEXCEPT = default;
\r
199 atomic_flag(const atomic_flag&) = delete;
\r
200 atomic_flag& operator=(const atomic_flag&) = delete;
\r
201 atomic_flag& operator=(const atomic_flag&) volatile = delete;
\r
204 inline bool atomic_flag::test_and_set(memory_order _Order)
\r
205 volatile throw() // TRANSITION, VSO#174686
\r
206 { // atomically set *this to true and return previous value
\r
207 return (_Atomic_flag_test_and_set(&_My_flag, _Order));
\r
210 inline bool atomic_flag::test_and_set(memory_order _Order) throw() // TRANSITION, VSO#174686
\r
211 { // atomically set *this to true and return previous value
\r
212 return (_Atomic_flag_test_and_set(&_My_flag, _Order));
\r
215 inline void atomic_flag::clear(memory_order _Order)
\r
216 volatile throw() // TRANSITION, VSO#174686
\r
217 { // atomically clear *this
\r
218 _Atomic_flag_clear(&_My_flag, _Order);
\r
221 inline void atomic_flag::clear(memory_order _Order) throw() // TRANSITION, VSO#174686
\r
222 { // atomically clear *this
\r
223 _Atomic_flag_clear(&_My_flag, _Order);
\r
226 inline bool atomic_flag_test_and_set(volatile atomic_flag *_Flag) throw() // TRANSITION, VSO#174686
\r
227 { // atomically set *_Flag to true and return previous value
\r
228 return (_Atomic_flag_test_and_set(&_Flag->_My_flag, memory_order_seq_cst));
\r
231 inline bool atomic_flag_test_and_set(atomic_flag *_Flag) throw() // TRANSITION, VSO#174686
\r
232 { // atomically set *_Flag to true and return previous value
\r
233 return (_Atomic_flag_test_and_set(&_Flag->_My_flag, memory_order_seq_cst));
\r
236 inline bool atomic_flag_test_and_set_explicit(
\r
237 volatile atomic_flag *_Flag, memory_order _Order) throw() // TRANSITION, VSO#174686
\r
238 { // atomically set *_Flag to true and return previous value
\r
239 return (_Atomic_flag_test_and_set(&_Flag->_My_flag, _Order));
\r
242 inline bool atomic_flag_test_and_set_explicit(
\r
243 atomic_flag *_Flag, memory_order _Order) throw() // TRANSITION, VSO#174686
\r
244 { // atomically set *_Flag to true and return previous value
\r
245 return (_Atomic_flag_test_and_set(&_Flag->_My_flag, _Order));
\r
248 inline void atomic_flag_clear(volatile atomic_flag *_Flag) throw() // TRANSITION, VSO#174686
\r
249 { // atomically clear *_Flag
\r
250 _Atomic_flag_clear(&_Flag->_My_flag, memory_order_seq_cst);
\r
253 inline void atomic_flag_clear(atomic_flag *_Flag) throw() // TRANSITION, VSO#174686
\r
254 { // atomically clear *_Flag
\r
255 _Atomic_flag_clear(&_Flag->_My_flag, memory_order_seq_cst);
\r
258 inline void atomic_flag_clear_explicit(
\r
259 volatile atomic_flag *_Flag, memory_order _Order) throw() // TRANSITION, VSO#174686
\r
260 { // atomically clear *_Flag
\r
261 _Atomic_flag_clear(&_Flag->_My_flag, _Order);
\r
264 inline void atomic_flag_clear_explicit(
\r
265 atomic_flag *_Flag, memory_order _Order) throw() // TRANSITION, VSO#174686
\r
266 { // atomically clear *_Flag
\r
267 _Atomic_flag_clear(&_Flag->_My_flag, _Order);
\r
270 // TEMPLATE CLASS _Atomic_impl
\r
271 template<unsigned _Bytes>
\r
272 struct _Atomic_impl
\r
273 { // struct for managing locks around operations on atomic types
\r
274 typedef _Uint1_t _My_int; // "1 byte" means "no alignment required"
\r
276 constexpr _Atomic_impl() throw() // TRANSITION, VSO#174686
\r
278 { // default constructor
\r
281 bool _Is_lock_free() const volatile
\r
282 { // operations that use locks are not lock-free
\r
286 void _Store(void *_Tgt, const void *_Src, memory_order _Order) volatile
\r
287 { // lock and store
\r
288 _Atomic_copy(&_My_flag, _Bytes, _Tgt, _Src, _Order);
\r
291 void _Load(void *_Tgt, const void *_Src,
\r
292 memory_order _Order) const volatile
\r
294 _Atomic_copy(&_My_flag, _Bytes, _Tgt, _Src, _Order);
\r
297 void _Exchange(void *_Left, void *_Right, memory_order _Order) volatile
\r
298 { // lock and exchange
\r
299 _Atomic_exchange(&_My_flag, _Bytes, _Left, _Right, _Order);
\r
302 bool _Compare_exchange_weak(
\r
303 void *_Tgt, void *_Exp, const void *_Value,
\r
304 memory_order _Order1, memory_order _Order2) volatile
\r
305 { // lock and compare/exchange
\r
306 return (_Atomic_compare_exchange_weak(
\r
307 &_My_flag, _Bytes, _Tgt, _Exp, _Value, _Order1, _Order2));
\r
310 bool _Compare_exchange_strong(
\r
311 void *_Tgt, void *_Exp, const void *_Value,
\r
312 memory_order _Order1, memory_order _Order2) volatile
\r
313 { // lock and compare/exchange
\r
314 return (_Atomic_compare_exchange_strong(
\r
315 &_My_flag, _Bytes, _Tgt, _Exp, _Value, _Order1, _Order2));
\r
319 mutable _Atomic_flag_t _My_flag;
\r
322 // SPECIALIZATIONS OF _Atomic_impl
\r
325 struct _Atomic_impl<1U>
\r
326 { // struct for managing lock-free operations on 1-byte atomic types
\r
327 typedef _Uint1_t _My_int;
\r
329 bool _Is_lock_free() const volatile
\r
330 { // all operations are lock-free
\r
334 void _Store(void *_Tgt, const void *_Src, memory_order _Order) volatile
\r
336 _Atomic_store_1((_My_int*)_Tgt, *(_My_int*)_Src, _Order);
\r
339 void _Load(void *_Tgt, const void *_Src,
\r
340 memory_order _Order) const volatile
\r
342 *(_My_int*)_Tgt = _Atomic_load_1((_My_int*)_Src, _Order);
\r
345 void _Exchange(void *_Left, void *_Right, memory_order _Order) volatile
\r
347 *(_My_int*)_Right =
\r
348 _Atomic_exchange_1((_My_int*)_Left, *(_My_int*)_Right, _Order);
\r
351 bool _Compare_exchange_weak(
\r
352 void *_Tgt, void *_Exp, const void *_Value,
\r
353 memory_order _Order1, memory_order _Order2) volatile
\r
354 { // compare/exchange
\r
355 return (_Atomic_compare_exchange_weak_1(
\r
356 (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
\r
357 _Order1, _Order2));
\r
360 bool _Compare_exchange_strong(
\r
361 void *_Tgt, void *_Exp, const void *_Value,
\r
362 memory_order _Order1, memory_order _Order2) volatile
\r
363 { // compare/exchange
\r
364 return (_Atomic_compare_exchange_strong_1(
\r
365 (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
\r
366 _Order1, _Order2));
\r
371 struct _Atomic_impl<2U>
\r
372 { // struct for managing lock-free operations on 2-byte atomic types
\r
373 typedef _Uint2_t _My_int;
\r
375 bool _Is_lock_free() const volatile
\r
376 { // all operations are lock-free
\r
380 void _Store(void *_Tgt, const void *_Src, memory_order _Order) volatile
\r
382 _Atomic_store_2((_My_int*)_Tgt, *(_My_int*)_Src, _Order);
\r
385 void _Load(void *_Tgt, const void *_Src,
\r
386 memory_order _Order) const volatile
\r
388 *(_My_int*)_Tgt = _Atomic_load_2((_My_int*)_Src, _Order);
\r
391 void _Exchange(void *_Left, void *_Right, memory_order _Order) volatile
\r
393 *(_My_int*)_Right =
\r
394 _Atomic_exchange_2((_My_int*)_Left, *(_My_int*)_Right, _Order);
\r
397 bool _Compare_exchange_weak(
\r
398 void *_Tgt, void *_Exp, const void *_Value,
\r
399 memory_order _Order1, memory_order _Order2) volatile
\r
400 { // compare/exchange
\r
401 return (_Atomic_compare_exchange_weak_2(
\r
402 (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
\r
403 _Order1, _Order2));
\r
406 bool _Compare_exchange_strong(
\r
407 void *_Tgt, void *_Exp, const void *_Value,
\r
408 memory_order _Order1, memory_order _Order2) volatile
\r
409 { // compare/exchange
\r
410 return (_Atomic_compare_exchange_strong_2(
\r
411 (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
\r
412 _Order1, _Order2));
\r
417 struct _Atomic_impl<4U>
\r
418 { // struct for managing lock-free operations on 4-byte atomic types
\r
419 typedef _Uint4_t _My_int;
\r
421 bool _Is_lock_free() const volatile
\r
422 { // all operations are lock-free
\r
426 void _Store(void *_Tgt, const void *_Src, memory_order _Order) volatile
\r
428 _Atomic_store_4((_My_int*)_Tgt, *(_My_int*)_Src, _Order);
\r
431 void _Load(void *_Tgt, const void *_Src,
\r
432 memory_order _Order) const volatile
\r
434 *(_My_int*)_Tgt = _Atomic_load_4((_My_int*)_Src, _Order);
\r
437 void _Exchange(void *_Left, void *_Right, memory_order _Order) volatile
\r
439 *(_My_int*)_Right =
\r
440 _Atomic_exchange_4((_My_int*)_Left, *(_My_int*)_Right, _Order);
\r
443 bool _Compare_exchange_weak(
\r
444 void *_Tgt, void *_Exp, const void *_Value,
\r
445 memory_order _Order1, memory_order _Order2) volatile
\r
446 { // compare/exchange
\r
447 return (_Atomic_compare_exchange_weak_4(
\r
448 (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
\r
449 _Order1, _Order2));
\r
452 bool _Compare_exchange_strong(
\r
453 void *_Tgt, void *_Exp, const void *_Value,
\r
454 memory_order _Order1, memory_order _Order2) volatile
\r
455 { // compare/exchange
\r
456 return (_Atomic_compare_exchange_strong_4(
\r
457 (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
\r
458 _Order1, _Order2));
\r
463 struct _Atomic_impl<8U>
\r
464 { // struct for managing lock-free operations on 8-byte atomic types
\r
465 typedef _Uint8_t _My_int;
\r
467 bool _Is_lock_free() const volatile
\r
468 { // all operations are lock-free
\r
472 void _Store(void *_Tgt, const void *_Src, memory_order _Order) volatile
\r
474 _Atomic_store_8((_My_int*)_Tgt, *(_My_int*)_Src, _Order);
\r
477 void _Load(void *_Tgt, const void *_Src,
\r
478 memory_order _Order) const volatile
\r
480 *(_My_int*)_Tgt = _Atomic_load_8((_My_int*)_Src, _Order);
\r
483 void _Exchange(void *_Left, void *_Right, memory_order _Order) volatile
\r
485 *(_My_int*)_Right =
\r
486 _Atomic_exchange_8((_My_int*)_Left, *(_My_int*)_Right, _Order);
\r
489 bool _Compare_exchange_weak(
\r
490 void *_Tgt, void *_Exp, const void *_Value,
\r
491 memory_order _Order1, memory_order _Order2) volatile
\r
492 { // compare/exchange
\r
493 return (_Atomic_compare_exchange_weak_8(
\r
494 (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
\r
495 _Order1, _Order2));
\r
498 bool _Compare_exchange_strong(
\r
499 void *_Tgt, void *_Exp, const void *_Value,
\r
500 memory_order _Order1, memory_order _Order2) volatile
\r
501 { // compare/exchange
\r
502 return (_Atomic_compare_exchange_strong_8(
\r
503 (_My_int*)_Tgt, (_My_int*)_Exp, *(_My_int*)_Value,
\r
504 _Order1, _Order2));
\r
508 // TEMPLATE CLASS _Atomic_base
\r
509 template<class _Ty,
\r
511 struct _Atomic_base
\r
512 : _Atomic_impl<_Bytes>
\r
513 { // struct that defines most member functions of std::atomic
\r
514 typedef _Atomic_impl<_Bytes> _Mybase;
\r
515 typedef typename _Mybase::_My_int _My_int;
\r
517 constexpr _Atomic_base(_Ty _Val) throw() // TRANSITION, VSO#174686
\r
519 { // construct from _Val, initialization is not an atomic operation
\r
522 _Atomic_base() _NOEXCEPT = default;
\r
523 _Atomic_base(const _Atomic_base&) = delete;
\r
524 _Atomic_base& operator=(const _Atomic_base&) = delete;
\r
525 _Atomic_base& operator=(const _Atomic_base&) volatile = delete;
\r
527 _Ty operator=(_Ty _Right) volatile throw() // TRANSITION, VSO#174686
\r
528 { // assign from _Right
\r
529 this->_Store((void *)_STD addressof(_My_val), _STD addressof(_Right), memory_order_seq_cst);
\r
533 _Ty operator=(_Ty _Right) throw() // TRANSITION, VSO#174686
\r
534 { // assign from _Right
\r
535 this->_Store((void *)_STD addressof(_My_val), _STD addressof(_Right), memory_order_seq_cst);
\r
539 bool is_lock_free() const volatile throw() // TRANSITION, VSO#174686
\r
540 { // return true if operations are lock-free
\r
541 return (this->_Is_lock_free());
\r
544 bool is_lock_free() const throw() // TRANSITION, VSO#174686
\r
545 { // return true if operations are lock-free
\r
546 return (this->_Is_lock_free());
\r
549 void store(_Ty _Value,
\r
550 memory_order _Order = memory_order_seq_cst) volatile throw() // TRANSITION, VSO#174686
\r
551 { // store _Value into *this
\r
552 this->_Store((void *)_STD addressof(_My_val), _STD addressof(_Value), _Order);
\r
555 void store(_Ty _Value,
\r
556 memory_order _Order = memory_order_seq_cst) throw() // TRANSITION, VSO#174686
\r
557 { // store _Value into *this
\r
558 this->_Store((void *)_STD addressof(_My_val), _STD addressof(_Value), _Order);
\r
561 _Ty load(memory_order _Order = memory_order_seq_cst)
\r
562 const volatile throw() // TRANSITION, VSO#174686
\r
563 { // return value stored in *this
\r
565 this->_Load((void *)_STD addressof(_Result), (const void *)_STD addressof(_My_val), _Order);
\r
569 _Ty load(memory_order _Order = memory_order_seq_cst)
\r
570 const throw() // TRANSITION, VSO#174686
\r
571 { // return value stored in *this
\r
573 this->_Load((void *)_STD addressof(_Result), (const void *)_STD addressof(_My_val), _Order);
\r
577 operator _Ty() const volatile throw() // TRANSITION, VSO#174686
\r
578 { // return value stored in *this
\r
582 operator _Ty() const throw() // TRANSITION, VSO#174686
\r
583 { // return value stored in *this
\r
587 _Ty exchange(_Ty _Value,
\r
588 memory_order _Order = memory_order_seq_cst) volatile throw() // TRANSITION, VSO#174686
\r
589 { // exchange value stored in *this with _Value
\r
590 this->_Exchange((void *)_STD addressof(_My_val), (void *)_STD addressof(_Value), _Order);
\r
594 _Ty exchange(_Ty _Value,
\r
595 memory_order _Order = memory_order_seq_cst) throw() // TRANSITION, VSO#174686
\r
596 { // exchange value stored in *this with _Value
\r
597 this->_Exchange((void *)_STD addressof(_My_val), (void *)_STD addressof(_Value), _Order);
\r
601 bool compare_exchange_weak(_Ty& _Exp, _Ty _Value,
\r
602 memory_order _Order1, memory_order _Order2) volatile throw() // TRANSITION, VSO#174686
\r
603 { // compare and exchange value stored in *this with *_Exp, _Value
\r
604 return (this->_Compare_exchange_weak(
\r
605 (void *)_STD addressof(_My_val), (void *)_STD addressof(_Exp), (const void *)_STD addressof(_Value),
\r
606 _Order1, _Order2));
\r
609 bool compare_exchange_weak(_Ty& _Exp, _Ty _Value,
\r
610 memory_order _Order1, memory_order _Order2) throw() // TRANSITION, VSO#174686
\r
611 { // compare and exchange value stored in *this with *_Exp, _Value
\r
612 return (this->_Compare_exchange_weak(
\r
613 (void *)_STD addressof(_My_val), (void *)_STD addressof(_Exp), (const void *)_STD addressof(_Value),
\r
614 _Order1, _Order2));
\r
617 bool compare_exchange_weak(_Ty& _Exp, _Ty _Value,
\r
618 memory_order _Order = memory_order_seq_cst) volatile throw() // TRANSITION, VSO#174686
\r
619 { // compare and exchange value stored in *this with *_Exp, _Value
\r
620 return (compare_exchange_weak(
\r
621 _Exp, _Value, _Order, _Get_memory_order(_Order)));
\r
624 bool compare_exchange_weak(_Ty& _Exp, _Ty _Value,
\r
625 memory_order _Order = memory_order_seq_cst) throw() // TRANSITION, VSO#174686
\r
626 { // compare and exchange value stored in *this with *_Exp, _Value
\r
627 return (compare_exchange_weak(
\r
628 _Exp, _Value, _Order, _Get_memory_order(_Order)));
\r
631 bool compare_exchange_strong(_Ty& _Exp, _Ty _Value,
\r
632 memory_order _Order1, memory_order _Order2) volatile throw() // TRANSITION, VSO#174686
\r
633 { // compare and exchange value stored in *this with *_Exp, _Value
\r
634 return (this->_Compare_exchange_strong(
\r
635 (void *)_STD addressof(_My_val), (void *)_STD addressof(_Exp), (const void *)_STD addressof(_Value),
\r
636 _Order1, _Order2));
\r
639 bool compare_exchange_strong(_Ty& _Exp, _Ty _Value,
\r
640 memory_order _Order1, memory_order _Order2) throw() // TRANSITION, VSO#174686
\r
641 { // compare and exchange value stored in *this with *_Exp, _Value
\r
642 return (this->_Compare_exchange_strong(
\r
643 (void *)_STD addressof(_My_val), (void *)_STD addressof(_Exp), (void *)_STD addressof(_Value),
\r
644 _Order1, _Order2));
\r
647 bool compare_exchange_strong(_Ty& _Exp, _Ty _Value,
\r
648 memory_order _Order = memory_order_seq_cst) volatile throw() // TRANSITION, VSO#174686
\r
649 { // compare and exchange value stored in *this with *_Exp, _Value
\r
650 return (compare_exchange_strong(
\r
651 _Exp, _Value, _Order, _Get_memory_order(_Order)));
\r
654 bool compare_exchange_strong(_Ty& _Exp, _Ty _Value,
\r
655 memory_order _Order = memory_order_seq_cst) throw() // TRANSITION, VSO#174686
\r
656 { // compare and exchange value stored in *this with *_Exp, _Value
\r
657 return (compare_exchange_strong(
\r
658 _Exp, _Value, _Order, _Get_memory_order(_Order)));
\r
661 #ifndef _ENABLE_ATOMIC_ALIGNMENT_FIX
\r
662 static_assert(alignof(_Ty) >= sizeof(_My_int),
\r
663 "You've instantiated std::atomic<T> with sizeof(T) equal to 2/4/8 and alignof(T) < sizeof(T). "
\r
664 "Before VS 2015 Update 2, this would have misbehaved at runtime. "
\r
665 "VS 2015 Update 2 was fixed to handle this correctly, "
\r
666 "but the fix inherently changes layout and breaks binary compatibility. "
\r
667 "Please define _ENABLE_ATOMIC_ALIGNMENT_FIX to acknowledge that you understand this, "
\r
668 "and that everything you're linking has been compiled with VS 2015 Update 2 (or later).");
\r
669 #endif /* _ENABLE_ATOMIC_ALIGNMENT_FIX */
\r
671 alignas(sizeof(_My_int)) alignas(_Ty) _Ty _My_val;
\r
674 // TEMPLATE CLASS atomic
\r
675 template<class _Ty>
\r
677 : _Atomic_base<_Ty, sizeof (_Ty)>
\r
678 { // template that manages values of _Ty atomically
\r
679 static_assert(is_trivially_copyable<_Ty>::value,
\r
680 "atomic<T> requires T to be trivially copyable.");
\r
682 typedef _Atomic_base<_Ty, sizeof (_Ty)> _My_base;
\r
684 using value_type = _Ty;
\r
687 static constexpr bool is_always_lock_free =
\r
689 || sizeof(_Ty) == 2
\r
690 || sizeof(_Ty) == 4
\r
691 || sizeof(_Ty) == 8;
\r
692 #endif /* _HAS_CXX17 */
\r
694 atomic() _NOEXCEPT = default;
\r
695 atomic(const atomic&) = delete;
\r
696 atomic& operator=(const atomic&) = delete;
\r
697 atomic& operator=(const atomic&) volatile = delete;
\r
699 constexpr atomic(_Ty _Val) throw() // TRANSITION, VSO#174686
\r
701 { // construct from _Val
\r
704 _Ty operator=(_Ty _Right) volatile throw() // TRANSITION, VSO#174686
\r
705 { // assign from _Right
\r
706 return (_My_base::operator=(_Right));
\r
709 _Ty operator=(_Ty _Right) throw() // TRANSITION, VSO#174686
\r
710 { // assign from _Right
\r
711 return (_My_base::operator=(_Right));
\r
714 #define ATOMIC_VAR_INIT(_Val) { _Val }
\r
715 #define _ATOMIC_ITYPE _Atomic_bool
\r
716 #define _ITYPE bool
\r
718 #include <xxatomic>
\r
720 #define _ATOMIC_ITYPE _Atomic_char
\r
721 #define _ITYPE char
\r
723 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
724 #include <xxatomic>
\r
726 #define _ATOMIC_ITYPE _Atomic_schar
\r
727 #define _ITYPE signed char
\r
729 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
730 #include <xxatomic>
\r
732 #define _ATOMIC_ITYPE _Atomic_uchar
\r
733 #define _ITYPE unsigned char
\r
735 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
736 #include <xxatomic>
\r
738 #define _ATOMIC_ITYPE _Atomic_char16_t
\r
739 #define _ITYPE char16_t
\r
741 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
742 #include <xxatomic>
\r
744 #define _ATOMIC_ITYPE _Atomic_char32_t
\r
745 #define _ITYPE char32_t
\r
747 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
748 #include <xxatomic>
\r
750 #define _ATOMIC_ITYPE _Atomic_wchar_t
\r
751 #define _ITYPE wchar_t
\r
752 #define _ISIZE _WCHAR_T_SIZE
\r
753 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
755 #ifndef _NATIVE_WCHAR_T_DEFINED
\r
756 #define _ATOMIC_HAS_NO_SPECIALIZATION
\r
757 #endif /* _NATIVE_WCHAR_T_DEFINED */
\r
759 #include <xxatomic>
\r
761 #define _ATOMIC_ITYPE _Atomic_short
\r
762 #define _ITYPE short
\r
763 #define _ISIZE _SHORT_SIZE
\r
764 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
765 #include <xxatomic>
\r
767 #define _ATOMIC_ITYPE _Atomic_ushort
\r
768 #define _ITYPE unsigned short
\r
769 #define _ISIZE _SHORT_SIZE
\r
770 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
771 #include <xxatomic>
\r
773 #define _ATOMIC_ITYPE _Atomic_int
\r
775 #define _ISIZE _INT_SIZE
\r
776 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
777 #include <xxatomic>
\r
779 #define _ATOMIC_ITYPE _Atomic_uint
\r
780 #define _ITYPE unsigned int
\r
781 #define _ISIZE _INT_SIZE
\r
782 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
783 #include <xxatomic>
\r
785 #define _ATOMIC_ITYPE _Atomic_long
\r
786 #define _ITYPE long
\r
787 #define _ISIZE _LONG_SIZE
\r
788 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
789 #include <xxatomic>
\r
791 #define _ATOMIC_ITYPE _Atomic_ulong
\r
792 #define _ITYPE unsigned long
\r
793 #define _ISIZE _LONG_SIZE
\r
794 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
795 #include <xxatomic>
\r
797 #define _ATOMIC_ITYPE _Atomic_llong
\r
798 #define _ITYPE long long
\r
799 #define _ISIZE _LONGLONG_SIZE
\r
800 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
801 #include <xxatomic>
\r
803 #define _ATOMIC_ITYPE _Atomic_ullong
\r
804 #define _ITYPE unsigned long long
\r
805 #define _ISIZE _LONGLONG_SIZE
\r
806 #define _ATOMIC_HAS_ARITHMETIC_OPS
\r
807 #include <xxatomic>
\r
809 #define _ATOMIC_ITYPE _Atomic_address
\r
810 #define _ITYPE void *
\r
811 #define _ISIZE _ADDR_SIZE
\r
812 #define _ATOMIC_IS_ADDRESS_TYPE
\r
813 #define _ATOMIC_HAS_NO_SPECIALIZATION
\r
814 #include <xxatomic>
\r
816 // GENERAL OPERATIONS ON ATOMIC TYPES
\r
817 template <class _Ty>
\r
818 inline bool atomic_is_lock_free(
\r
819 const volatile atomic<_Ty> *_Atom) throw() // TRANSITION, VSO#174686
\r
821 return (_Atom->is_lock_free());
\r
824 template <class _Ty>
\r
825 inline bool atomic_is_lock_free(
\r
826 const atomic<_Ty> *_Atom) throw() // TRANSITION, VSO#174686
\r
828 return (_Atom->is_lock_free());
\r
831 template <class _Ty>
\r
832 inline void atomic_init(
\r
833 volatile atomic<_Ty> *_Atom, _Ty _Value) throw() // TRANSITION, VSO#174686
\r
838 template <class _Ty>
\r
839 inline void atomic_init(
\r
840 atomic<_Ty> *_Atom, _Ty _Value) throw() // TRANSITION, VSO#174686
\r
845 template <class _Ty>
\r
846 inline void atomic_store(volatile atomic<_Ty> *_Atom,
\r
847 _Ty _Value) throw() // TRANSITION, VSO#174686
\r
849 _Atom->store(_Value);
\r
852 template <class _Ty>
\r
853 inline void atomic_store(atomic<_Ty> *_Atom,
\r
854 _Ty _Value) throw() // TRANSITION, VSO#174686
\r
856 _Atom->store(_Value);
\r
859 template <class _Ty>
\r
860 inline void atomic_store_explicit(
\r
861 volatile atomic<_Ty> *_Atom, _Ty _Value,
\r
862 memory_order _Order) throw() // TRANSITION, VSO#174686
\r
864 _Atom->store(_Value, _Order);
\r
867 template <class _Ty>
\r
868 inline void atomic_store_explicit(
\r
869 atomic<_Ty> *_Atom, _Ty _Value,
\r
870 memory_order _Order) throw() // TRANSITION, VSO#174686
\r
872 _Atom->store(_Value, _Order);
\r
875 template <class _Ty>
\r
876 inline _Ty atomic_load(const volatile atomic<_Ty> *_Atom) throw() // TRANSITION, VSO#174686
\r
878 return (_Atom->load());
\r
881 template <class _Ty>
\r
882 inline _Ty atomic_load(const atomic<_Ty> *_Atom) throw() // TRANSITION, VSO#174686
\r
884 return (_Atom->load());
\r
887 template <class _Ty>
\r
888 inline _Ty atomic_load_explicit(
\r
889 const volatile atomic<_Ty> *_Atom,
\r
890 memory_order _Order) throw() // TRANSITION, VSO#174686
\r
892 return (_Atom->load(_Order));
\r
895 template <class _Ty>
\r
896 inline _Ty atomic_load_explicit(
\r
897 const atomic<_Ty> *_Atom,
\r
898 memory_order _Order) throw() // TRANSITION, VSO#174686
\r
900 return (_Atom->load(_Order));
\r
903 template <class _Ty>
\r
904 inline _Ty atomic_exchange(volatile atomic<_Ty> *_Atom,
\r
905 _Ty _Value) throw() // TRANSITION, VSO#174686
\r
907 return (_Atom->exchange(_Value));
\r
910 template <class _Ty>
\r
911 inline _Ty atomic_exchange(atomic<_Ty> *_Atom,
\r
912 _Ty _Value) throw() // TRANSITION, VSO#174686
\r
914 return (_Atom->exchange(_Value));
\r
917 template <class _Ty>
\r
918 inline _Ty atomic_exchange_explicit(volatile atomic<_Ty> *_Atom,
\r
919 _Ty _Value, memory_order _Order) throw() // TRANSITION, VSO#174686
\r
921 return (_Atom->exchange(_Value, _Order));
\r
924 template <class _Ty>
\r
925 inline _Ty atomic_exchange_explicit(atomic<_Ty> *_Atom, _Ty _Value,
\r
926 memory_order _Order) throw() // TRANSITION, VSO#174686
\r
928 return (_Atom->exchange(_Value, _Order));
\r
931 template <class _Ty>
\r
932 inline bool atomic_compare_exchange_weak(
\r
933 volatile atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value) throw() // TRANSITION, VSO#174686
\r
935 return (_Atom->compare_exchange_weak(*_Exp, _Value));
\r
938 template <class _Ty>
\r
939 inline bool atomic_compare_exchange_weak(
\r
940 atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value) throw() // TRANSITION, VSO#174686
\r
942 return (_Atom->compare_exchange_weak(*_Exp, _Value));
\r
945 template <class _Ty>
\r
946 inline bool atomic_compare_exchange_weak_explicit(
\r
947 volatile atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value,
\r
948 memory_order _Order1, memory_order _Order2) throw() // TRANSITION, VSO#174686
\r
950 return (_Atom->compare_exchange_weak(*_Exp, _Value, _Order1, _Order2));
\r
953 template <class _Ty>
\r
954 inline bool atomic_compare_exchange_weak_explicit(
\r
955 atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value,
\r
956 memory_order _Order1, memory_order _Order2) throw() // TRANSITION, VSO#174686
\r
958 return (_Atom->compare_exchange_weak(*_Exp, _Value, _Order1, _Order2));
\r
961 template <class _Ty>
\r
962 inline bool atomic_compare_exchange_strong(
\r
963 volatile atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value) throw() // TRANSITION, VSO#174686
\r
965 return (_Atom->compare_exchange_strong(*_Exp, _Value));
\r
968 template <class _Ty>
\r
969 inline bool atomic_compare_exchange_strong(
\r
970 atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value) throw() // TRANSITION, VSO#174686
\r
972 return (_Atom->compare_exchange_strong(*_Exp, _Value));
\r
975 template <class _Ty>
\r
976 inline bool atomic_compare_exchange_strong_explicit(
\r
977 volatile atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value,
\r
978 memory_order _Order1, memory_order _Order2) throw() // TRANSITION, VSO#174686
\r
980 return (_Atom->compare_exchange_strong(*_Exp, _Value, _Order1, _Order2));
\r
983 template <class _Ty>
\r
984 inline bool atomic_compare_exchange_strong_explicit(
\r
985 atomic<_Ty> *_Atom, _Ty *_Exp, _Ty _Value,
\r
986 memory_order _Order1, memory_order _Order2) throw() // TRANSITION, VSO#174686
\r
988 return (_Atom->compare_exchange_strong(*_Exp, _Value, _Order1, _Order2));
\r
// OVERLOADS FOR POINTERS
template <class _Ty>
inline _Ty *atomic_fetch_add(volatile atomic<_Ty*> *_Atom,
    ptrdiff_t _Value) throw() // TRANSITION, VSO#174686
{ // atomically advance the stored pointer by _Value elements, yielding the old pointer
    _Ty *_Old = _Atom->fetch_add(_Value);
    return _Old;
}
\r
999 template <class _Ty>
\r
1000 inline _Ty *atomic_fetch_add(atomic<_Ty*> *_Atom,
\r
1001 ptrdiff_t _Value) throw() // TRANSITION, VSO#174686
\r
1003 return (_Atom->fetch_add(_Value));
\r
1006 template <class _Ty>
\r
1007 inline _Ty *atomic_fetch_add_explicit(
\r
1008 volatile atomic<_Ty*> *_Atom, ptrdiff_t _Value,
\r
1009 memory_order _Order) throw() // TRANSITION, VSO#174686
\r
1011 return (_Atom->fetch_add(_Value, _Order));
\r
1014 template <class _Ty>
\r
1015 inline _Ty *atomic_fetch_add_explicit(
\r
1016 atomic<_Ty*> *_Atom, ptrdiff_t _Value,
\r
1017 memory_order _Order) throw() // TRANSITION, VSO#174686
\r
1019 return (_Atom->fetch_add(_Value, _Order));
\r
1022 template <class _Ty>
\r
1023 inline _Ty *atomic_fetch_sub(volatile atomic<_Ty*> *_Atom,
\r
1024 ptrdiff_t _Value) throw() // TRANSITION, VSO#174686
\r
1026 return (_Atom->fetch_sub(_Value));
\r
1029 template <class _Ty>
\r
1030 inline _Ty *atomic_fetch_sub(atomic<_Ty*> *_Atom,
\r
1031 ptrdiff_t _Value) throw() // TRANSITION, VSO#174686
\r
1033 return (_Atom->fetch_sub(_Value));
\r
1036 template <class _Ty>
\r
1037 inline _Ty *atomic_fetch_sub_explicit(
\r
1038 volatile atomic<_Ty*> *_Atom, ptrdiff_t _Value,
\r
1039 memory_order _Order) throw() // TRANSITION, VSO#174686
\r
1041 return (_Atom->fetch_sub(_Value, _Order));
\r
1044 template <class _Ty>
\r
1045 inline _Ty *atomic_fetch_sub_explicit(
\r
1046 atomic<_Ty*> *_Atom, ptrdiff_t _Value,
\r
1047 memory_order _Order) throw() // TRANSITION, VSO#174686
\r
1049 return (_Atom->fetch_sub(_Value, _Order));
\r
1053 extern "C" inline void atomic_thread_fence(memory_order _Order) throw() // TRANSITION, VSO#174686
\r
1054 { // force memory visibility and inhibit compiler reordering
\r
1055 return (_Atomic_thread_fence(_Order));
\r
1058 extern "C" inline void atomic_signal_fence(memory_order _Order) throw() // TRANSITION, VSO#174686
\r
1059 { // force memory visibility and inhibit compiler reordering
\r
1060 return (_Atomic_signal_fence(_Order));
\r
// ATOMIC TYPEDEFS
// Convenience aliases required by the standard: one atomic_X name for
// each builtin arithmetic/character type and each <cstdint> alias X.

// builtin boolean type
typedef atomic<bool> atomic_bool;

// builtin character and integer types
typedef atomic<char> atomic_char;
typedef atomic<signed char> atomic_schar;
typedef atomic<unsigned char> atomic_uchar;
typedef atomic<short> atomic_short;
typedef atomic<unsigned short> atomic_ushort;
typedef atomic<int> atomic_int;
typedef atomic<unsigned int> atomic_uint;
typedef atomic<long> atomic_long;
typedef atomic<unsigned long> atomic_ulong;
typedef atomic<long long> atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;

// UTF-16/UTF-32 character types
typedef atomic<char16_t> atomic_char16_t;
typedef atomic<char32_t> atomic_char32_t;

// wide character type
typedef atomic<wchar_t> atomic_wchar_t;

// exact-width integer types from <cstdint>
typedef atomic<int8_t> atomic_int8_t;
typedef atomic<uint8_t> atomic_uint8_t;
typedef atomic<int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic<int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic<int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

// minimum-width ("least") integer types
typedef atomic<int_least8_t> atomic_int_least8_t;
typedef atomic<uint_least8_t> atomic_uint_least8_t;
typedef atomic<int_least16_t> atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t> atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t> atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// fastest ("fast") integer types
typedef atomic<int_fast8_t> atomic_int_fast8_t;
typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
typedef atomic<int_fast16_t> atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t> atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t> atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// pointer-sized and miscellaneous integer types
typedef atomic<intptr_t> atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t> atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t> atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;
\r
1117 #pragma pop_macro("new")
\r
1118 #pragma warning(pop)
\r
1120 #endif /* RC_INVOKED */
\r
1121 #endif /* _ATOMIC_ */
\r
1124 * Copyright (c) by P.J. Plauger. All rights reserved.
\r
1125 * Consult your license regarding permissions and restrictions.
\r