#ifndef EASTL_ATOMIC_INTERNAL_BASE_WIDTH_H
#define EASTL_ATOMIC_INTERNAL_BASE_WIDTH_H
#if defined(EA_PRAGMA_ONCE_SUPPORTED)
	#pragma once
#endif
#include "atomic_push_compiler_options.h"


namespace eastl
{

namespace internal
{
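	// atomic_base_width is the width-dispatched implementation layer beneath eastl::atomic<T>.
	// Each specialization emitted below (1, 2, 4, 8, and 16 bytes) implements the
	// store/load/exchange/compare_exchange members by forwarding to the platform's
	// fixed-width atomic macros, selected at compile time from sizeof(T).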
	template <typename T, unsigned width = sizeof(T)>
	struct atomic_base_width;
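	// Maps a bit count to the storage type the platform designates for that width,
	// e.g. EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(32) -> EASTL_ATOMIC_FIXED_WIDTH_TYPE_32.
	// T only has to be trivially copyable, not trivially default constructible, so
	// every operation type-puns T through this storage type instead of using T directly.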
	#define EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits) \
		EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_FIXED_WIDTH_TYPE_, bits)
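	// Body of the store() overloads: pun `desired` into the fixed-width storage type,
	// then invoke the width-suffixed store macro on the atomic's address. For example,
	// op = EASTL_ATOMIC_STORE_RELEASE_ with bits = 32 expands to a call of
	// EASTL_ATOMIC_STORE_RELEASE_32(...).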
	#define EASTL_ATOMIC_STORE_FUNC_IMPL(op, bits) \
		EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits) fixedWidthDesired = EASTL_ATOMIC_TYPE_PUN_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), desired); \
		EA_PREPROCESSOR_JOIN(op, bits)(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), \
									   EASTL_ATOMIC_TYPE_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), this->GetAtomicAddress()), \
									   fixedWidthDesired)
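	// Body of the load() overloads: read into a fixed-width temporary via the
	// width-suffixed load macro, then pun the result back to T for the caller.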
	#define EASTL_ATOMIC_LOAD_FUNC_IMPL(op, bits) \
		EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits) retVal; \
		EA_PREPROCESSOR_JOIN(op, bits)(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), \
									   retVal, \
									   EASTL_ATOMIC_TYPE_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), this->GetAtomicAddress())); \
		return EASTL_ATOMIC_TYPE_PUN_CAST(T, retVal);
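	// Body of the exchange() overloads: atomically swap in the punned `desired`
	// value and return the previous value punned back to T.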
	#define EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(op, bits) \
		EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits) retVal; \
		EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits) fixedWidthDesired = EASTL_ATOMIC_TYPE_PUN_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), desired); \
		EA_PREPROCESSOR_JOIN(op, bits)(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), \
									   retVal, \
									   EASTL_ATOMIC_TYPE_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), this->GetAtomicAddress()), \
									   fixedWidthDesired); \
		return EASTL_ATOMIC_TYPE_PUN_CAST(T, retVal);
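	// Body of the compare_exchange_*() overloads: `expected` is passed by address so
	// the underlying operation can write back the observed value on failure; the bool
	// result of the comparison is returned to the caller.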
	#define EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(op, bits) \
		bool retVal; \
		EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits) fixedWidthDesired = EASTL_ATOMIC_TYPE_PUN_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), desired); \
		EA_PREPROCESSOR_JOIN(op, bits)(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), \
									   retVal, \
									   EASTL_ATOMIC_TYPE_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), this->GetAtomicAddress()), \
									   EASTL_ATOMIC_TYPE_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), &expected), \
									   fixedWidthDesired); \
		return retVal;
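	// Token-pastes an operation name and a memory-order suffix onto the EASTL_ATOMIC_
	// prefix, e.g. EASTL_ATOMIC_BASE_OP_JOIN(CMPXCHG_WEAK, _SEQ_CST_) yields
	// EASTL_ATOMIC_CMPXCHG_WEAK_SEQ_CST_.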
	#define EASTL_ATOMIC_BASE_OP_JOIN(op, Order) \
		EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_, op), Order)
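	// Generates the full compare-exchange overload set for one function name:
	// the default seq_cst overload, one overload per single memory order, and one
	// per supported (success order, failure order) pair.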
	#define EASTL_ATOMIC_BASE_CMPXCHG_FUNCS_IMPL(funcName, cmpxchgOp, bits) \
		using Base::funcName; \
		bool funcName(T& expected, T desired) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _SEQ_CST_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _RELAXED_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_acquire_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _ACQUIRE_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_release_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _RELEASE_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_acq_rel_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _ACQ_REL_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_seq_cst_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _SEQ_CST_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_relaxed_s, \
					  eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _RELAXED_RELAXED_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_acquire_s, \
					  eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _ACQUIRE_RELAXED_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_acquire_s, \
					  eastl::internal::memory_order_acquire_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _ACQUIRE_ACQUIRE_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_release_s, \
					  eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _RELEASE_RELAXED_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_acq_rel_s, \
					  eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _ACQ_REL_RELAXED_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_acq_rel_s, \
					  eastl::internal::memory_order_acquire_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _ACQ_REL_ACQUIRE_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_seq_cst_s, \
					  eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _SEQ_CST_RELAXED_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_seq_cst_s, \
					  eastl::internal::memory_order_acquire_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _SEQ_CST_ACQUIRE_), bits); \
		} \
		bool funcName(T& expected, T desired, \
					  eastl::internal::memory_order_seq_cst_s, \
					  eastl::internal::memory_order_seq_cst_s) EA_NOEXCEPT \
		{ \
			EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _SEQ_CST_SEQ_CST_), bits); \
		}
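	// Instantiate the overload set once for compare_exchange_weak and once for
	// compare_exchange_strong; only the intrinsic macro prefix differs.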
	#define EASTL_ATOMIC_BASE_CMPXCHG_WEAK_FUNCS_IMPL(bits) \
		EASTL_ATOMIC_BASE_CMPXCHG_FUNCS_IMPL(compare_exchange_weak, CMPXCHG_WEAK, bits)

	#define EASTL_ATOMIC_BASE_CMPXCHG_STRONG_FUNCS_IMPL(bits) \
		EASTL_ATOMIC_BASE_CMPXCHG_FUNCS_IMPL(compare_exchange_strong, CMPXCHG_STRONG, bits)
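	// Defines the atomic_base_width specialization for one byte width: asserts the
	// required alignment, then emits the constructors and the store/load/exchange/
	// compare_exchange overload sets, all implemented via the <bits>-suffixed macros above.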
	#define EASTL_ATOMIC_BASE_WIDTH_SPECIALIZE(bytes, bits) \
		template <typename T> \
		struct atomic_base_width<T, bytes> : public atomic_size_aligned<T> \
		{ \
		private: \
			static_assert(EA_ALIGN_OF(atomic_size_aligned<T>) == bytes, "eastl::atomic<T> must be sizeof(T) aligned!"); \
			static_assert(EA_ALIGN_OF(atomic_size_aligned<T>) == sizeof(T), "eastl::atomic<T> must be sizeof(T) aligned!"); \
			using Base = atomic_size_aligned<T>; \
		public: /* ctors */ \
			EA_CONSTEXPR atomic_base_width(T desired) EA_NOEXCEPT \
				: Base{ desired } \
			{ \
			} \
			EA_CONSTEXPR atomic_base_width() EA_NOEXCEPT_IF(eastl::is_nothrow_default_constructible_v<T>) = default; \
			atomic_base_width(const atomic_base_width&) EA_NOEXCEPT = delete; \
		public: /* store */ \
			using Base::store; \
			void store(T desired) EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_STORE_FUNC_IMPL(EASTL_ATOMIC_STORE_SEQ_CST_, bits); \
			} \
			void store(T desired, eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_STORE_FUNC_IMPL(EASTL_ATOMIC_STORE_RELAXED_, bits); \
			} \
			void store(T desired, eastl::internal::memory_order_release_s) EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_STORE_FUNC_IMPL(EASTL_ATOMIC_STORE_RELEASE_, bits); \
			} \
			void store(T desired, eastl::internal::memory_order_seq_cst_s) EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_STORE_FUNC_IMPL(EASTL_ATOMIC_STORE_SEQ_CST_, bits); \
			} \
		public: /* load */ \
			using Base::load; \
			T load() const EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_LOAD_FUNC_IMPL(EASTL_ATOMIC_LOAD_SEQ_CST_, bits); \
			} \
			T load(eastl::internal::memory_order_relaxed_s) const EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_LOAD_FUNC_IMPL(EASTL_ATOMIC_LOAD_RELAXED_, bits); \
			} \
			T load(eastl::internal::memory_order_acquire_s) const EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_LOAD_FUNC_IMPL(EASTL_ATOMIC_LOAD_ACQUIRE_, bits); \
			} \
			T load(eastl::internal::memory_order_seq_cst_s) const EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_LOAD_FUNC_IMPL(EASTL_ATOMIC_LOAD_SEQ_CST_, bits); \
			} \
		public: /* exchange */ \
			using Base::exchange; \
			T exchange(T desired) EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(EASTL_ATOMIC_EXCHANGE_SEQ_CST_, bits); \
			} \
			T exchange(T desired, eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(EASTL_ATOMIC_EXCHANGE_RELAXED_, bits); \
			} \
			T exchange(T desired, eastl::internal::memory_order_acquire_s) EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(EASTL_ATOMIC_EXCHANGE_ACQUIRE_, bits); \
			} \
			T exchange(T desired, eastl::internal::memory_order_release_s) EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(EASTL_ATOMIC_EXCHANGE_RELEASE_, bits); \
			} \
			T exchange(T desired, eastl::internal::memory_order_acq_rel_s) EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(EASTL_ATOMIC_EXCHANGE_ACQ_REL_, bits); \
			} \
			T exchange(T desired, eastl::internal::memory_order_seq_cst_s) EA_NOEXCEPT \
			{ \
				EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(EASTL_ATOMIC_EXCHANGE_SEQ_CST_, bits); \
			} \
		public: /* compare_exchange_weak */ \
			EASTL_ATOMIC_BASE_CMPXCHG_WEAK_FUNCS_IMPL(bits) \
		public: /* compare_exchange_strong */ \
			EASTL_ATOMIC_BASE_CMPXCHG_STRONG_FUNCS_IMPL(bits) \
		public: /* assignment operator */ \
			using Base::operator=; \
			T operator=(T desired) EA_NOEXCEPT \
			{ \
				store(desired, eastl::memory_order_seq_cst); \
				return desired; \
			} \
			atomic_base_width& operator=(const atomic_base_width&) EA_NOEXCEPT = delete; \
			atomic_base_width& operator=(const atomic_base_width&) volatile EA_NOEXCEPT = delete; \
		};
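	// Emit a specialization only for the widths the target platform actually supports.
	// For an unsupported sizeof(T), eastl::atomic<T> falls back to the declared-but-undefined
	// primary template above, turning the misuse into a compile-time error.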
#if defined(EASTL_ATOMIC_HAS_8BIT)
	EASTL_ATOMIC_BASE_WIDTH_SPECIALIZE(1, 8)
#endif

#if defined(EASTL_ATOMIC_HAS_16BIT)
	EASTL_ATOMIC_BASE_WIDTH_SPECIALIZE(2, 16)
#endif

#if defined(EASTL_ATOMIC_HAS_32BIT)
	EASTL_ATOMIC_BASE_WIDTH_SPECIALIZE(4, 32)
#endif

#if defined(EASTL_ATOMIC_HAS_64BIT)
	EASTL_ATOMIC_BASE_WIDTH_SPECIALIZE(8, 64)
#endif

#if defined(EASTL_ATOMIC_HAS_128BIT)
	EASTL_ATOMIC_BASE_WIDTH_SPECIALIZE(16, 128)
#endif
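	// Illustrative usage (a sketch, assuming a platform that defines EASTL_ATOMIC_HAS_32BIT,
	// so sizeof(int) == 4 selects atomic_base_width<int, 4>; user code would normally go
	// through eastl::atomic<T> rather than this internal base):
	//
	//     eastl::internal::atomic_base_width<int> counter{ 0 };
	//     counter.store(1, eastl::memory_order_release);            // EASTL_ATOMIC_STORE_RELEASE_32
	//     int observed = counter.load(eastl::memory_order_acquire); // EASTL_ATOMIC_LOAD_ACQUIRE_32
	//     int expected = 1;
	//     bool swapped = counter.compare_exchange_strong(expected, 2); // seq_cst by default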
} // namespace internal

} // namespace eastl

#include "atomic_pop_compiler_options.h"


#endif /* EASTL_ATOMIC_INTERNAL_BASE_WIDTH_H */