#ifndef EASTL_ATOMIC_INTERNAL_POINTER_H
#define EASTL_ATOMIC_INTERNAL_POINTER_H

#if defined(EA_PRAGMA_ONCE_SUPPORTED)
	#pragma once
#endif


#include "atomic_push_compiler_options.h"


namespace eastl
{

namespace internal
{

	template <typename T, unsigned width = sizeof(T)>
	struct atomic_pointer_base;
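
	// atomic_pointer_base (declared above) is the fallback layer of the pointer atomics:
	// its members are generated by the static_assert macros below and exist only to turn
	// misuse (an invalid memory-order argument, or calling through a volatile-qualified
	// atomic) into a compile-time error. The atomic_pointer_width specializations defined
	// later in this file provide the real, width-specific implementations.
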
#define EASTL_ATOMIC_POINTER_STATIC_ASSERT_FUNCS_IMPL(funcName) \
	template <typename Order> \
	T* funcName(ptrdiff_t, Order) EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(T); \
	} \
	\
	template <typename Order> \
	T* funcName(ptrdiff_t, Order) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
	} \
	\
	T* funcName(ptrdiff_t) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
	}

#define EASTL_ATOMIC_POINTER_STATIC_ASSERT_INC_DEC_OPERATOR_IMPL(operatorOp) \
	T* operator operatorOp() volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
	} \
	\
	T* operator operatorOp(int) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
	}

#define EASTL_ATOMIC_POINTER_STATIC_ASSERT_ASSIGNMENT_OPERATOR_IMPL(operatorOp) \
	T* operator operatorOp(ptrdiff_t) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
	}
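
// Note: the three EASTL_ATOMIC_POINTER_STATIC_ASSERT_*_IMPL macros above deliberately
// generate bodies containing only a static_assert, so the base template never performs a
// (potentially non-atomic) fallback operation; misuse simply fails to compile.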

	template <typename T, unsigned width>
	struct atomic_pointer_base<T*, width> : public atomic_base_width<T*, width>
	{
	private:
		using Base = atomic_base_width<T*, width>;

	public:
		EA_CONSTEXPR atomic_pointer_base(T* desired) EA_NOEXCEPT : Base{ desired } {}
		EA_CONSTEXPR atomic_pointer_base() EA_NOEXCEPT = default;

		using Base::operator=;

		EASTL_ATOMIC_POINTER_STATIC_ASSERT_FUNCS_IMPL(fetch_add)
		EASTL_ATOMIC_POINTER_STATIC_ASSERT_FUNCS_IMPL(add_fetch)
		EASTL_ATOMIC_POINTER_STATIC_ASSERT_FUNCS_IMPL(fetch_sub)
		EASTL_ATOMIC_POINTER_STATIC_ASSERT_FUNCS_IMPL(sub_fetch)

		EASTL_ATOMIC_POINTER_STATIC_ASSERT_INC_DEC_OPERATOR_IMPL(++)
		EASTL_ATOMIC_POINTER_STATIC_ASSERT_INC_DEC_OPERATOR_IMPL(--)

		EASTL_ATOMIC_POINTER_STATIC_ASSERT_ASSIGNMENT_OPERATOR_IMPL(+=)
		EASTL_ATOMIC_POINTER_STATIC_ASSERT_ASSIGNMENT_OPERATOR_IMPL(-=)
	};

	template <typename T, unsigned width = sizeof(T)>
	struct atomic_pointer_width;

#define EASTL_ATOMIC_POINTER_FUNC_IMPL(op, bits) \
	T* retVal; \
	{ \
		ptr_integral_type retType; \
		ptr_integral_type addend = static_cast<ptr_integral_type>(arg) * static_cast<ptr_integral_type>(sizeof(T)); \
		EA_PREPROCESSOR_JOIN(op, bits)(ptr_integral_type, retType, EASTL_ATOMIC_INTEGRAL_CAST(ptr_integral_type, this->GetAtomicAddress()), addend); \
		retVal = reinterpret_cast<T*>(retType); \
	} \
	return retVal;
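
// The macro above implements the shared arithmetic body: it scales the ptrdiff_t argument by
// sizeof(T) so the operation advances by whole objects, hands the widened integral address to
// the width-specific backend macro, and casts the integral result back to T*. As an
// illustration (not literal preprocessor output), inside the 64-bit specialization an invocation
// such as EASTL_ATOMIC_POINTER_FUNC_IMPL(EASTL_ATOMIC_FETCH_ADD_SEQ_CST_, 64) behaves roughly like:
//
//     int64_t result;
//     int64_t addend = static_cast<int64_t>(arg) * static_cast<int64_t>(sizeof(T));
//     EASTL_ATOMIC_FETCH_ADD_SEQ_CST_64(int64_t, result, /* address cast to the integral type */, addend);
//     return reinterpret_cast<T*>(result);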

#define EASTL_ATOMIC_POINTER_FETCH_IMPL(funcName, op, bits) \
	T* funcName(ptrdiff_t arg) EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_TYPE_IS_OBJECT(T); \
		EASTL_ATOMIC_POINTER_FUNC_IMPL(op, bits); \
	}

#define EASTL_ATOMIC_POINTER_FETCH_ORDER_IMPL(funcName, orderType, op, bits) \
	T* funcName(ptrdiff_t arg, orderType) EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_TYPE_IS_OBJECT(T); \
		EASTL_ATOMIC_POINTER_FUNC_IMPL(op, bits); \
	}

#define EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, Order) \
	EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_, fetchOp), Order)
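
// EASTL_ATOMIC_POINTER_FETCH_OP_JOIN only pastes tokens: for example
// EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(FETCH_ADD, _SEQ_CST_) produces EASTL_ATOMIC_FETCH_ADD_SEQ_CST_,
// which the fetch macros below then join with the bit width to select a backend such as
// EASTL_ATOMIC_FETCH_ADD_SEQ_CST_64.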

#define EASTL_ATOMIC_POINTER_FETCH_FUNCS_IMPL(funcName, fetchOp, bits) \
	using Base::funcName; \
	EASTL_ATOMIC_POINTER_FETCH_IMPL(funcName, EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, _SEQ_CST_), bits) \
	EASTL_ATOMIC_POINTER_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_relaxed_s, \
										  EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, _RELAXED_), bits) \
	EASTL_ATOMIC_POINTER_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_acquire_s, \
										  EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, _ACQUIRE_), bits) \
	EASTL_ATOMIC_POINTER_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_release_s, \
										  EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, _RELEASE_), bits) \
	EASTL_ATOMIC_POINTER_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_acq_rel_s, \
										  EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, _ACQ_REL_), bits) \
	EASTL_ATOMIC_POINTER_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_seq_cst_s, \
										  EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, _SEQ_CST_), bits)

#define EASTL_ATOMIC_POINTER_FETCH_INC_DEC_OPERATOR_IMPL(operatorOp, preFuncName, postFuncName) \
	using Base::operator operatorOp; \
	T* operator operatorOp() EA_NOEXCEPT \
	{ \
		return preFuncName(1, eastl::memory_order_seq_cst); \
	} \
	\
	T* operator operatorOp(int) EA_NOEXCEPT \
	{ \
		return postFuncName(1, eastl::memory_order_seq_cst); \
	}

#define EASTL_ATOMIC_POINTER_FETCH_ASSIGNMENT_OPERATOR_IMPL(operatorOp, funcName) \
	using Base::operator operatorOp; \
	T* operator operatorOp(ptrdiff_t arg) EA_NOEXCEPT \
	{ \
		return funcName(arg, eastl::memory_order_seq_cst); \
	}
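
// When instantiated by EASTL_ATOMIC_POINTER_WIDTH_SPECIALIZE below, the two operator macros
// above route the canonical pointer operators through the named fetch functions with seq_cst
// ordering; e.g. for an atomic pointer p, ++p calls add_fetch(1), p++ calls fetch_add(1),
// and p += n calls add_fetch(n).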

#define EASTL_ATOMIC_POINTER_WIDTH_SPECIALIZE(bytes, bits) \
	template <typename T> \
	struct atomic_pointer_width<T*, bytes> : public atomic_pointer_base<T*, bytes> \
	{ \
	private: \
		using Base = atomic_pointer_base<T*, bytes>; \
		using u_ptr_integral_type = EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(uint, bits), _t); \
		using ptr_integral_type = EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(int, bits), _t); \
	\
	public: /* ctors */ \
		EA_CONSTEXPR atomic_pointer_width(T* desired) EA_NOEXCEPT \
			: Base{ desired } \
		{ \
		} \
	\
		EA_CONSTEXPR atomic_pointer_width() EA_NOEXCEPT = default; \
		atomic_pointer_width(const atomic_pointer_width&) EA_NOEXCEPT = delete; \
	\
	public: /* assignment operators */ \
		using Base::operator=; \
		atomic_pointer_width& operator=(const atomic_pointer_width&) EA_NOEXCEPT = delete; \
		atomic_pointer_width& operator=(const atomic_pointer_width&) volatile EA_NOEXCEPT = delete; \
	\
		EASTL_ATOMIC_POINTER_FETCH_FUNCS_IMPL(fetch_add, FETCH_ADD, bits) \
		EASTL_ATOMIC_POINTER_FETCH_FUNCS_IMPL(add_fetch, ADD_FETCH, bits) \
		EASTL_ATOMIC_POINTER_FETCH_FUNCS_IMPL(fetch_sub, FETCH_SUB, bits) \
		EASTL_ATOMIC_POINTER_FETCH_FUNCS_IMPL(sub_fetch, SUB_FETCH, bits) \
	\
		EASTL_ATOMIC_POINTER_FETCH_INC_DEC_OPERATOR_IMPL(++, add_fetch, fetch_add) \
		EASTL_ATOMIC_POINTER_FETCH_INC_DEC_OPERATOR_IMPL(--, sub_fetch, fetch_sub) \
	\
		EASTL_ATOMIC_POINTER_FETCH_ASSIGNMENT_OPERATOR_IMPL(+=, add_fetch) \
		EASTL_ATOMIC_POINTER_FETCH_ASSIGNMENT_OPERATOR_IMPL(-=, sub_fetch) \
	\
		T* load(eastl::internal::memory_order_read_depends_s) EA_NOEXCEPT \
		{ \
			T* retPointer; \
			EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_LOAD_READ_DEPENDS_, bits)(T*, retPointer, this->GetAtomicAddress()); \
			return retPointer; \
		} \
	};
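
// The load(memory_order_read_depends_s) overload above is specific to the pointer
// specializations: read_depends is EASTL's dependency-ordered load (in the spirit of
// memory_order_consume), where ordering is carried by subsequently dereferencing the
// returned pointer rather than by a fence.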


#if defined(EASTL_ATOMIC_HAS_32BIT) && EA_PLATFORM_PTR_SIZE == 4
	EASTL_ATOMIC_POINTER_WIDTH_SPECIALIZE(4, 32)
#endif

#if defined(EASTL_ATOMIC_HAS_64BIT) && EA_PLATFORM_PTR_SIZE == 8
	EASTL_ATOMIC_POINTER_WIDTH_SPECIALIZE(8, 64)
#endif
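
// Illustrative usage (a sketch, not part of this header): eastl::atomic<T*> is built on the
// specializations instantiated above, so pointer arithmetic is scaled by sizeof(T) much like
// std::atomic<T*>. Assuming a 64-bit platform and a hypothetical int buffer[16]:
//
//     eastl::atomic<int*> p{ buffer };
//     int* before = p.fetch_add(2);   // returns buffer; p now points at buffer + 2
//     int* after  = p.add_fetch(1);   // returns the updated pointer, buffer + 3
//     p -= 3;                         // routed through sub_fetch(3, seq_cst); p == buffer again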

} // namespace internal

} // namespace eastl


#include "atomic_pop_compiler_options.h"

#endif /* EASTL_ATOMIC_INTERNAL_POINTER_H */