#ifndef EASTL_ATOMIC_INTERNAL_SIZE_ALIGNED_H
#define EASTL_ATOMIC_INTERNAL_SIZE_ALIGNED_H

#if defined(EA_PRAGMA_ONCE_SUPPORTED)
	#pragma once
#endif

#include "atomic_push_compiler_options.h"
// Stamps out the full compare-exchange overload set for funcName
// (compare_exchange_weak / compare_exchange_strong). Every overload here is a
// compile-time rejection stub: any call that resolves to one of these fires
// either EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER (bad memory-order
// argument) or EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN (volatile-qualified
// use is unsupported). `return false;` keeps the non-void signatures
// well-formed; the static_assert fails on any instantiation.
#define EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_IMPL(funcName) \
	template <typename OrderSuccess, typename OrderFailure> \
	bool funcName(T& /*expected*/, T /*desired*/, \
				  OrderSuccess /*orderSuccess*/, \
				  OrderFailure /*orderFailure*/) EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(T); \
		return false; \
	} \
	\
	template <typename OrderSuccess, typename OrderFailure> \
	bool funcName(T& /*expected*/, T /*desired*/, \
				  OrderSuccess /*orderSuccess*/, \
				  OrderFailure /*orderFailure*/) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
		return false; \
	} \
	\
	template <typename Order> \
	bool funcName(T& /*expected*/, T /*desired*/, \
				  Order /*order*/) EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(T); \
		return false; \
	} \
	\
	template <typename Order> \
	bool funcName(T& /*expected*/, T /*desired*/, \
				  Order /*order*/) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
		return false; \
	} \
	\
	bool funcName(T& /*expected*/, T /*desired*/) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
		return false; \
	}
// Declares the static_assert-only overload set for compare_exchange_weak.
#define EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_WEAK_IMPL() \
	EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_IMPL(compare_exchange_weak)
// Declares the static_assert-only overload set for compare_exchange_strong.
#define EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_STRONG_IMPL() \
	EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_IMPL(compare_exchange_strong)
83 EA_CONSTEXPR
atomic_size_aligned() EA_NOEXCEPT_IF(eastl::is_nothrow_default_constructible_v<T>)
92 template <
typename Order>
93 void store(T , Order ) EA_NOEXCEPT
95 EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(T);
98 template <
typename Order>
99 void store(T , Order )
volatile EA_NOEXCEPT
101 EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
104 void store(T )
volatile EA_NOEXCEPT
106 EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
111 template <
typename Order>
112 T load(Order )
const EA_NOEXCEPT
114 EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(T);
117 template <
typename Order>
118 T load(Order )
const volatile EA_NOEXCEPT
120 EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
123 T load()
const volatile EA_NOEXCEPT
125 EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
130 template <
typename Order>
131 T exchange(T , Order ) EA_NOEXCEPT
133 EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(T);
136 template <
typename Order>
137 T exchange(T , Order )
volatile EA_NOEXCEPT
139 EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
142 T exchange(T )
volatile EA_NOEXCEPT
144 EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
149 EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_WEAK_IMPL()
153 EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_STRONG_IMPL()
157 T operator=(T )
volatile EA_NOEXCEPT
159 EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
167 T* GetAtomicAddress()
const EA_NOEXCEPT
184 EA_ALIGN(
sizeof(T))
mutable T mAtomic;
#include "atomic_pop_compiler_options.h"
#endif /* EASTL_ATOMIC_INTERNAL_SIZE_ALIGNED_H */