#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>
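+// Forcing inlining means each atomic member below compiles down to its
+// underlying __atomic builtin even at -O0, with no out-of-line call.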
+#ifndef _GLIBCXX_ALWAYS_INLINE
+#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((always_inline))
+#endif
+
namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
constexpr memory_order
__cmpexch_failure_order(memory_order __m) noexcept
{
return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
| (__m & __memory_order_modifier_mask));
}
- inline void
+ _GLIBCXX_ALWAYS_INLINE void
atomic_thread_fence(memory_order __m) noexcept
{ __atomic_thread_fence(__m); }
- inline void
+ _GLIBCXX_ALWAYS_INLINE void
atomic_signal_fence(memory_order __m) noexcept
{ __atomic_signal_fence(__m); }
constexpr atomic_flag(bool __i) noexcept
: __atomic_flag_base{ _S_init(__i) }
{ }
- bool
+ _GLIBCXX_ALWAYS_INLINE bool
test_and_set(memory_order __m = memory_order_seq_cst) noexcept
{
return __atomic_test_and_set (&_M_i, __m);
}
- bool
+ _GLIBCXX_ALWAYS_INLINE bool
test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
{
return __atomic_test_and_set (&_M_i, __m);
}
- void
+ _GLIBCXX_ALWAYS_INLINE void
clear(memory_order __m = memory_order_seq_cst) noexcept
{
memory_order __b = __m & __memory_order_mask;
__glibcxx_assert(__b != memory_order_consume);
__glibcxx_assert(__b != memory_order_acquire);
__glibcxx_assert(__b != memory_order_acq_rel);
__atomic_clear (&_M_i, __m);
}
- void
+ _GLIBCXX_ALWAYS_INLINE void
clear(memory_order __m = memory_order_seq_cst) volatile noexcept
{
memory_order __b = __m & __memory_order_mask;
__glibcxx_assert(__b != memory_order_consume);
__glibcxx_assert(__b != memory_order_acquire);
__glibcxx_assert(__b != memory_order_acq_rel);
__atomic_clear (&_M_i, __m);
}
bool
is_lock_free() const volatile noexcept
{ return __atomic_is_lock_free(sizeof(_M_i), nullptr); }
- void
+ _GLIBCXX_ALWAYS_INLINE void
store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
{
memory_order __b = __m & __memory_order_mask;
__glibcxx_assert(__b != memory_order_acquire);
__glibcxx_assert(__b != memory_order_acq_rel);
__glibcxx_assert(__b != memory_order_consume);
__atomic_store_n(&_M_i, __i, __m);
}
- void
+ _GLIBCXX_ALWAYS_INLINE void
store(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
__atomic_store_n(&_M_i, __i, __m);
}
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
load(memory_order __m = memory_order_seq_cst) const noexcept
{
memory_order __b = __m & __memory_order_mask;
__glibcxx_assert(__b != memory_order_release);
__glibcxx_assert(__b != memory_order_acq_rel);
return __atomic_load_n(&_M_i, __m);
}
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{
memory_order __b = __m & __memory_order_mask;
__glibcxx_assert(__b != memory_order_release);
__glibcxx_assert(__b != memory_order_acq_rel);
return __atomic_load_n(&_M_i, __m);
}
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{
return __atomic_exchange_n(&_M_i, __i, __m);
}
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return __atomic_exchange_n(&_M_i, __i, __m);
}
- bool
+ _GLIBCXX_ALWAYS_INLINE bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) noexcept
{
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
}
- bool
+ _GLIBCXX_ALWAYS_INLINE bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m1,
memory_order __m2) volatile noexcept
{
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
}
- bool
+ _GLIBCXX_ALWAYS_INLINE bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) noexcept
{
return compare_exchange_weak(__i1, __i2, __m,
__cmpexch_failure_order(__m));
}
- bool
+ _GLIBCXX_ALWAYS_INLINE bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return compare_exchange_weak(__i1, __i2, __m,
__cmpexch_failure_order(__m));
}
- bool
+ _GLIBCXX_ALWAYS_INLINE bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) noexcept
{
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
}
- bool
+ _GLIBCXX_ALWAYS_INLINE bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m1,
memory_order __m2) volatile noexcept
{
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
}
- bool
+ _GLIBCXX_ALWAYS_INLINE bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) noexcept
{
return compare_exchange_strong(__i1, __i2, __m,
__cmpexch_failure_order(__m));
}
- bool
+ _GLIBCXX_ALWAYS_INLINE bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return compare_exchange_strong(__i1, __i2, __m,
__cmpexch_failure_order(__m));
}
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_add(&_M_i, __i, __m); }
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_add(&_M_i, __i, __m); }
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_sub(&_M_i, __i, __m); }
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_sub(&_M_i, __i, __m); }
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_and(&_M_i, __i, __m); }
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_and(&_M_i, __i, __m); }
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_or(&_M_i, __i, __m); }
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_or(&_M_i, __i, __m); }
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_xor(&_M_i, __i, __m); }
- __int_type
+ _GLIBCXX_ALWAYS_INLINE __int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_xor(&_M_i, __i, __m); }
bool
is_lock_free() const volatile noexcept
{ return __atomic_is_lock_free(_M_type_size(1), nullptr); }
- void
+ _GLIBCXX_ALWAYS_INLINE void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
__atomic_store_n(&_M_p, __p, __m);
}
- void
+ _GLIBCXX_ALWAYS_INLINE void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
__atomic_store_n(&_M_p, __p, __m);
}
- __pointer_type
+ _GLIBCXX_ALWAYS_INLINE __pointer_type
load(memory_order __m = memory_order_seq_cst) const noexcept
{
memory_order __b = __m & __memory_order_mask;
__glibcxx_assert(__b != memory_order_release);
__glibcxx_assert(__b != memory_order_acq_rel);
return __atomic_load_n(&_M_p, __m);
}
- __pointer_type
+ _GLIBCXX_ALWAYS_INLINE __pointer_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{
memory_order __b = __m & __memory_order_mask;
__glibcxx_assert(__b != memory_order_release);
__glibcxx_assert(__b != memory_order_acq_rel);
return __atomic_load_n(&_M_p, __m);
}
- __pointer_type
+ _GLIBCXX_ALWAYS_INLINE __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
return __atomic_exchange_n(&_M_p, __p, __m);
}
- __pointer_type
+ _GLIBCXX_ALWAYS_INLINE __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return __atomic_exchange_n(&_M_p, __p, __m);
}
- bool
+ _GLIBCXX_ALWAYS_INLINE bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) noexcept
{
return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
}
- bool
+ _GLIBCXX_ALWAYS_INLINE bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) volatile noexcept
{
return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
}
- __pointer_type
+ _GLIBCXX_ALWAYS_INLINE __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }
- __pointer_type
+ _GLIBCXX_ALWAYS_INLINE __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }
- __pointer_type
+ _GLIBCXX_ALWAYS_INLINE __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
- __pointer_type
+ _GLIBCXX_ALWAYS_INLINE __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
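
For context, here is a minimal standalone sketch, outside the patch, of what the attribute changes. The `ALWAYS_INLINE` macro and `counter` type are hypothetical stand-ins mirroring the fallback definition above: a plain `inline` wrapper is still emitted and called out-of-line in unoptimized builds, while `__attribute__((always_inline))` substitutes the body at every call site, so the atomic operation reduces to the builtin it wraps.

// sketch.cpp: illustration only; ALWAYS_INLINE and counter are hypothetical names.
#include <atomic>

#ifndef ALWAYS_INLINE
#define ALWAYS_INLINE inline __attribute__((__always_inline__))
#endif

struct counter
{
  std::atomic<int> _M_v{0};

  // With plain `inline`, an -O0 build emits this wrapper as a real function
  // and calls it; with always_inline the body is substituted at each call
  // site, leaving just the lock-free RMW the builtin expands to.
  ALWAYS_INLINE int
  fetch_increment() noexcept
  { return _M_v.fetch_add(1, std::memory_order_relaxed); }
};

int main()
{
  counter c;
  return c.fetch_increment(); // returns the prior value, 0
}

Building with `g++ -std=c++11 -O0 -S sketch.cpp` against a libstdc++ that carries this patch leaves no call instruction for the wrapper, only the underlying atomic read-modify-write (`lock xadd` on x86).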