#ifndef EASTL_ATOMIC_INTERNAL_INTEGRAL_H
#define EASTL_ATOMIC_INTERNAL_INTEGRAL_H
#if defined(EA_PRAGMA_ONCE_SUPPORTED)
	#pragma once
#endif
#include "atomic_push_compiler_options.h"


namespace eastl
{

namespace internal
{
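// Implementation layer for the integral specializations of eastl::atomic<T>.
// atomic_integral_base stamps out the integral API as static_assert-only
// stubs; the atomic_integral_width<T, bytes> specializations further down
// provide the real per-width implementations.

// Each of the following EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_*_IMPL macros
// defines member functions whose only body is a static_assert, so a call
// with an invalid memory order type, or through a volatile-qualified object,
// fails to compile with a descriptive error.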
#define EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_FUNCS_IMPL(funcName) \
	template <typename Order> \
	T funcName(T /*arg*/, Order /*order*/) EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(T); \
	} \
	\
	template <typename Order> \
	T funcName(T /*arg*/, Order /*order*/) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
	} \
	\
	T funcName(T /*arg*/) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
	}
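// Same pattern for pre/post increment and decrement: the volatile overloads
// exist only to static_assert, since EASTL's atomics deliberately reject
// volatile-qualified use.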
#define EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_INC_DEC_OPERATOR_IMPL(operatorOp) \
	T operator operatorOp() volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
	} \
	\
	T operator operatorOp(int) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
	}
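// And likewise for the compound assignment operators (+=, -=, &=, |=, ^=).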
#define EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_ASSIGNMENT_OPERATOR_IMPL(operatorOp) \
	T operator operatorOp(T /*arg*/) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
	}
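	// atomic_integral_base<T, width> extends atomic_base_width<T, width> with
	// the integral-only operations (fetch_add and friends, ++/--, compound
	// assignment). Everything declared here static_asserts; the usable
	// overloads come from the atomic_integral_width specializations below.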
	template <typename T, unsigned width = sizeof(T)>
	struct atomic_integral_base : public atomic_base_width<T, width>
	{
	private:

		using Base = atomic_base_width<T, width>;

	public: /* ctors */

		EA_CONSTEXPR atomic_integral_base(T desired) EA_NOEXCEPT
			: Base{ desired }
		{
		}

		EA_CONSTEXPR atomic_integral_base() EA_NOEXCEPT = default;

	public: /* assignment operator */

		using Base::operator=;
	public: /* arithmetic and bitwise fetch operations */

		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_FUNCS_IMPL(fetch_add)
		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_FUNCS_IMPL(add_fetch)

		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_FUNCS_IMPL(fetch_sub)
		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_FUNCS_IMPL(sub_fetch)

		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_FUNCS_IMPL(fetch_and)
		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_FUNCS_IMPL(and_fetch)

		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_FUNCS_IMPL(fetch_or)
		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_FUNCS_IMPL(or_fetch)

		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_FUNCS_IMPL(fetch_xor)
		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_FUNCS_IMPL(xor_fetch)

	public: /* operators */

		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_INC_DEC_OPERATOR_IMPL(++)
		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_INC_DEC_OPERATOR_IMPL(--)

		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_ASSIGNMENT_OPERATOR_IMPL(+=)
		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_ASSIGNMENT_OPERATOR_IMPL(-=)
		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_ASSIGNMENT_OPERATOR_IMPL(&=)
		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_ASSIGNMENT_OPERATOR_IMPL(|=)
		EASTL_ATOMIC_INTEGRAL_STATIC_ASSERT_ASSIGNMENT_OPERATOR_IMPL(^=)
	};
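	// Declared but not defined: only the widths the target supports are
	// specialized below, so instantiating an unsupported width is a
	// compile-time error.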
	template <typename T, unsigned width = sizeof(T)>
	struct atomic_integral_width;
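// EASTL_ATOMIC_INTEGRAL_FUNC_IMPL joins op and bits into a per-width backend
// macro, e.g. EASTL_ATOMIC_FETCH_ADD_SEQ_CST_32(T, retVal, ptr, arg), which
// performs the read-modify-write on this->GetAtomicAddress() (provided by
// the base width class) and stores the operation's return value in retVal.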
#define EASTL_ATOMIC_INTEGRAL_FUNC_IMPL(op, bits) \
	T retVal; \
	EA_PREPROCESSOR_JOIN(op, bits)(T, retVal, this->GetAtomicAddress(), arg); \
	return retVal;
#define EASTL_ATOMIC_INTEGRAL_FETCH_IMPL(funcName, op, bits) \
	T funcName(T arg) EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_INTEGRAL_FUNC_IMPL(op, bits); \
	}
#define EASTL_ATOMIC_INTEGRAL_FETCH_ORDER_IMPL(funcName, orderType, op, bits) \
	T funcName(T arg, orderType) EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_INTEGRAL_FUNC_IMPL(op, bits); \
	}
#define EASTL_ATOMIC_INTEGRAL_FETCH_OP_JOIN(fetchOp, Order) \
	EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_, fetchOp), Order)
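// EASTL_ATOMIC_INTEGRAL_FETCH_OP_JOIN builds the backend macro name, e.g.
// (FETCH_ADD, _SEQ_CST_) -> EASTL_ATOMIC_FETCH_ADD_SEQ_CST_. The macro below
// then emits one overload per memory order: the order-less overload defaults
// to sequential consistency, plus one tag-typed overload each for relaxed,
// acquire, release, acq_rel and seq_cst.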
#define EASTL_ATOMIC_INTEGRAL_FETCH_FUNCS_IMPL(funcName, fetchOp, bits) \
	using Base::funcName; \
	\
	EASTL_ATOMIC_INTEGRAL_FETCH_IMPL(funcName, EASTL_ATOMIC_INTEGRAL_FETCH_OP_JOIN(fetchOp, _SEQ_CST_), bits) \
	\
	EASTL_ATOMIC_INTEGRAL_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_relaxed_s, \
	                                       EASTL_ATOMIC_INTEGRAL_FETCH_OP_JOIN(fetchOp, _RELAXED_), bits) \
	\
	EASTL_ATOMIC_INTEGRAL_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_acquire_s, \
	                                       EASTL_ATOMIC_INTEGRAL_FETCH_OP_JOIN(fetchOp, _ACQUIRE_), bits) \
	\
	EASTL_ATOMIC_INTEGRAL_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_release_s, \
	                                       EASTL_ATOMIC_INTEGRAL_FETCH_OP_JOIN(fetchOp, _RELEASE_), bits) \
	\
	EASTL_ATOMIC_INTEGRAL_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_acq_rel_s, \
	                                       EASTL_ATOMIC_INTEGRAL_FETCH_OP_JOIN(fetchOp, _ACQ_REL_), bits) \
	\
	EASTL_ATOMIC_INTEGRAL_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_seq_cst_s, \
	                                       EASTL_ATOMIC_INTEGRAL_FETCH_OP_JOIN(fetchOp, _SEQ_CST_), bits)
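// Pre-increment/decrement return the updated value (add_fetch/sub_fetch);
// post-increment/decrement return the original value (fetch_add/fetch_sub).
// Both use sequential consistency, matching std::atomic's operator semantics.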
#define EASTL_ATOMIC_INTEGRAL_FETCH_INC_DEC_OPERATOR_IMPL(operatorOp, preFuncName, postFuncName) \
	using Base::operator operatorOp; \
	\
	T operator operatorOp() EA_NOEXCEPT \
	{ \
		return preFuncName(1, eastl::memory_order_seq_cst); \
	} \
	\
	T operator operatorOp(int) EA_NOEXCEPT \
	{ \
		return postFuncName(1, eastl::memory_order_seq_cst); \
	}
#define EASTL_ATOMIC_INTEGRAL_FETCH_ASSIGNMENT_OPERATOR_IMPL(operatorOp, funcName) \
	using Base::operator operatorOp; \
	\
	T operator operatorOp(T arg) EA_NOEXCEPT \
	{ \
		return funcName(arg, eastl::memory_order_seq_cst); \
	}
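// Stamps out the full atomic_integral_width specialization for one width:
// 'bytes' is the sizeof(T) that selects the partial specialization, and
// 'bits' picks the N-bit backend macro family.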
#define EASTL_ATOMIC_INTEGRAL_WIDTH_SPECIALIZE(bytes, bits) \
	template <typename T> \
	struct atomic_integral_width<T, bytes> : public atomic_integral_base<T, bytes> \
	{ \
	private: \
	\
		using Base = atomic_integral_base<T, bytes>; \
	\
	public: /* ctors */ \
	\
		EA_CONSTEXPR atomic_integral_width(T desired) EA_NOEXCEPT \
			: Base{ desired } \
		{ \
		} \
	\
		EA_CONSTEXPR atomic_integral_width() EA_NOEXCEPT = default; \
	\
		atomic_integral_width(const atomic_integral_width&) EA_NOEXCEPT = delete; \
	\
	public: /* assignment operators */ \
	\
		using Base::operator=; \
	\
		atomic_integral_width& operator=(const atomic_integral_width&)          EA_NOEXCEPT = delete; \
		atomic_integral_width& operator=(const atomic_integral_width&) volatile EA_NOEXCEPT = delete; \
	\
		EASTL_ATOMIC_INTEGRAL_FETCH_FUNCS_IMPL(fetch_add, FETCH_ADD, bits) \
		EASTL_ATOMIC_INTEGRAL_FETCH_FUNCS_IMPL(add_fetch, ADD_FETCH, bits) \
	\
		EASTL_ATOMIC_INTEGRAL_FETCH_FUNCS_IMPL(fetch_sub, FETCH_SUB, bits) \
		EASTL_ATOMIC_INTEGRAL_FETCH_FUNCS_IMPL(sub_fetch, SUB_FETCH, bits) \
	\
		EASTL_ATOMIC_INTEGRAL_FETCH_FUNCS_IMPL(fetch_and, FETCH_AND, bits) \
		EASTL_ATOMIC_INTEGRAL_FETCH_FUNCS_IMPL(and_fetch, AND_FETCH, bits) \
	\
		EASTL_ATOMIC_INTEGRAL_FETCH_FUNCS_IMPL(fetch_or, FETCH_OR, bits) \
		EASTL_ATOMIC_INTEGRAL_FETCH_FUNCS_IMPL(or_fetch, OR_FETCH, bits) \
	\
		EASTL_ATOMIC_INTEGRAL_FETCH_FUNCS_IMPL(fetch_xor, FETCH_XOR, bits) \
		EASTL_ATOMIC_INTEGRAL_FETCH_FUNCS_IMPL(xor_fetch, XOR_FETCH, bits) \
	\
		EASTL_ATOMIC_INTEGRAL_FETCH_INC_DEC_OPERATOR_IMPL(++, add_fetch, fetch_add) \
		EASTL_ATOMIC_INTEGRAL_FETCH_INC_DEC_OPERATOR_IMPL(--, sub_fetch, fetch_sub) \
	\
		EASTL_ATOMIC_INTEGRAL_FETCH_ASSIGNMENT_OPERATOR_IMPL(+=, add_fetch) \
		EASTL_ATOMIC_INTEGRAL_FETCH_ASSIGNMENT_OPERATOR_IMPL(-=, sub_fetch) \
		EASTL_ATOMIC_INTEGRAL_FETCH_ASSIGNMENT_OPERATOR_IMPL(&=, and_fetch) \
		EASTL_ATOMIC_INTEGRAL_FETCH_ASSIGNMENT_OPERATOR_IMPL(|=, or_fetch) \
		EASTL_ATOMIC_INTEGRAL_FETCH_ASSIGNMENT_OPERATOR_IMPL(^=, xor_fetch) \
	};
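// Instantiate one specialization per width that the compiler/platform
// backend reports as supported via the EASTL_ATOMIC_HAS_*BIT defines.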
#if defined(EASTL_ATOMIC_HAS_8BIT)
	EASTL_ATOMIC_INTEGRAL_WIDTH_SPECIALIZE(1, 8)
#endif


#if defined(EASTL_ATOMIC_HAS_16BIT)
	EASTL_ATOMIC_INTEGRAL_WIDTH_SPECIALIZE(2, 16)
#endif


#if defined(EASTL_ATOMIC_HAS_32BIT)
	EASTL_ATOMIC_INTEGRAL_WIDTH_SPECIALIZE(4, 32)
#endif


#if defined(EASTL_ATOMIC_HAS_64BIT)
	EASTL_ATOMIC_INTEGRAL_WIDTH_SPECIALIZE(8, 64)
#endif


#if defined(EASTL_ATOMIC_HAS_128BIT)
	EASTL_ATOMIC_INTEGRAL_WIDTH_SPECIALIZE(16, 128)
#endif
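// Illustrative use via the public eastl::atomic<T>, which is built on these
// internals (a sketch for orientation, not part of this header):
//
//   eastl::atomic<uint32_t> counter{0};
//   counter.fetch_add(1, eastl::memory_order_relaxed); // relaxed RMW
//   counter += 2; // seq_cst add_fetch via operator+=
//   ++counter;    // seq_cst add_fetch; prefix returns the new value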
} // namespace internal

} // namespace eastl


#include "atomic_pop_compiler_options.h"

#endif /* EASTL_ATOMIC_INTERNAL_INTEGRAL_H */