atomic_pointer.h
// Copyright (c) Electronic Arts Inc. All rights reserved.


#ifndef EASTL_ATOMIC_INTERNAL_POINTER_H
#define EASTL_ATOMIC_INTERNAL_POINTER_H

#if defined(EA_PRAGMA_ONCE_SUPPORTED)
    #pragma once
#endif


#include "atomic_push_compiler_options.h"


namespace eastl
{


namespace internal
{

    template <typename T, unsigned width = sizeof(T)>
    struct atomic_pointer_base;

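// The EASTL_ATOMIC_POINTER_STATIC_ASSERT_*_IMPL macros below stamp out member functions
// on atomic_pointer_base that contain nothing but static_asserts. They exist so that
// passing an unrecognized memory-order type, or calling these members on a
// volatile-qualified atomic (which EASTL deliberately does not support), produces a
// readable compile-time diagnostic instead of an obscure overload-resolution failure.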
#define EASTL_ATOMIC_POINTER_STATIC_ASSERT_FUNCS_IMPL(funcName) \
    template <typename Order> \
    T* funcName(ptrdiff_t /*arg*/, Order /*order*/) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(T); \
    } \
    \
    template <typename Order> \
    T* funcName(ptrdiff_t /*arg*/, Order /*order*/) volatile EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
    } \
    \
    T* funcName(ptrdiff_t /*arg*/) volatile EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
    }

#define EASTL_ATOMIC_POINTER_STATIC_ASSERT_INC_DEC_OPERATOR_IMPL(operatorOp) \
    T* operator operatorOp() volatile EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
    } \
    \
    T* operator operatorOp(int) volatile EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
    }

#define EASTL_ATOMIC_POINTER_STATIC_ASSERT_ASSIGNMENT_OPERATOR_IMPL(operatorOp) \
    T* operator operatorOp(ptrdiff_t /*arg*/) volatile EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
    }

    template <typename T, unsigned width>
    struct atomic_pointer_base<T*, width> : public atomic_base_width<T*, width>
    {
    private:

        using Base = atomic_base_width<T*, width>;

    public: /* ctors */

        EA_CONSTEXPR atomic_pointer_base(T* desired) EA_NOEXCEPT
            : Base{ desired }
        {
        }

        EA_CONSTEXPR atomic_pointer_base() EA_NOEXCEPT = default;

        atomic_pointer_base(const atomic_pointer_base&) EA_NOEXCEPT = delete;

    public: /* assignment operators */

        using Base::operator=;

        atomic_pointer_base& operator=(const atomic_pointer_base&) EA_NOEXCEPT = delete;
        atomic_pointer_base& operator=(const atomic_pointer_base&) volatile EA_NOEXCEPT = delete;

    public: /* fetch_add */

        EASTL_ATOMIC_POINTER_STATIC_ASSERT_FUNCS_IMPL(fetch_add)

    public: /* add_fetch */

        EASTL_ATOMIC_POINTER_STATIC_ASSERT_FUNCS_IMPL(add_fetch)

    public: /* fetch_sub */

        EASTL_ATOMIC_POINTER_STATIC_ASSERT_FUNCS_IMPL(fetch_sub)

    public: /* sub_fetch */

        EASTL_ATOMIC_POINTER_STATIC_ASSERT_FUNCS_IMPL(sub_fetch)

    public: /* operator++ && operator-- */

        EASTL_ATOMIC_POINTER_STATIC_ASSERT_INC_DEC_OPERATOR_IMPL(++)

        EASTL_ATOMIC_POINTER_STATIC_ASSERT_INC_DEC_OPERATOR_IMPL(--)

    public: /* operator+= && operator-= */

        EASTL_ATOMIC_POINTER_STATIC_ASSERT_ASSIGNMENT_OPERATOR_IMPL(+=)

        EASTL_ATOMIC_POINTER_STATIC_ASSERT_ASSIGNMENT_OPERATOR_IMPL(-=)

    };

    template <typename T, unsigned width = sizeof(T)>
    struct atomic_pointer_width;

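// The pointer fetch operations are implemented on top of the integral atomic macros of
// matching width. EASTL_ATOMIC_POINTER_FUNC_IMPL scales the ptrdiff_t argument by
// sizeof(T) (ordinary pointer arithmetic), performs the operation on the address
// reinterpreted as ptr_integral_type, and converts the resulting integer back to T*.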
#define EASTL_ATOMIC_POINTER_FUNC_IMPL(op, bits) \
    T* retVal; \
    { \
        ptr_integral_type retType; \
        ptr_integral_type addend = static_cast<ptr_integral_type>(arg) * static_cast<ptr_integral_type>(sizeof(T)); \
        \
        EA_PREPROCESSOR_JOIN(op, bits)(ptr_integral_type, retType, EASTL_ATOMIC_INTEGRAL_CAST(ptr_integral_type, this->GetAtomicAddress()), addend); \
        \
        retVal = reinterpret_cast<T*>(retType); \
    } \
    return retVal;

#define EASTL_ATOMIC_POINTER_FETCH_IMPL(funcName, op, bits) \
    T* funcName(ptrdiff_t arg) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_STATIC_ASSERT_TYPE_IS_OBJECT(T); \
        EASTL_ATOMIC_POINTER_FUNC_IMPL(op, bits); \
    }

#define EASTL_ATOMIC_POINTER_FETCH_ORDER_IMPL(funcName, orderType, op, bits) \
    T* funcName(ptrdiff_t arg, orderType) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_STATIC_ASSERT_TYPE_IS_OBJECT(T); \
        EASTL_ATOMIC_POINTER_FUNC_IMPL(op, bits); \
    }

#define EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, Order) \
    EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_, fetchOp), Order)

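// For each fetch function, the macro below pulls in the static-assert overloads from Base,
// defines the overload that takes no order argument (sequentially consistent by default),
// and then one overload per explicit memory order, each dispatching to the matching
// EASTL_ATOMIC_<OP>_<ORDER>_<bits> backend macro.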
#define EASTL_ATOMIC_POINTER_FETCH_FUNCS_IMPL(funcName, fetchOp, bits) \
    using Base::funcName; \
    \
    EASTL_ATOMIC_POINTER_FETCH_IMPL(funcName, EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, _SEQ_CST_), bits) \
    \
    EASTL_ATOMIC_POINTER_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_relaxed_s, \
                                          EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, _RELAXED_), bits) \
    \
    EASTL_ATOMIC_POINTER_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_acquire_s, \
                                          EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, _ACQUIRE_), bits) \
    \
    EASTL_ATOMIC_POINTER_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_release_s, \
                                          EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, _RELEASE_), bits) \
    \
    EASTL_ATOMIC_POINTER_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_acq_rel_s, \
                                          EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, _ACQ_REL_), bits) \
    \
    EASTL_ATOMIC_POINTER_FETCH_ORDER_IMPL(funcName, eastl::internal::memory_order_seq_cst_s, \
                                          EASTL_ATOMIC_POINTER_FETCH_OP_JOIN(fetchOp, _SEQ_CST_), bits)

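// The increment/decrement operators are built on add_fetch/fetch_add and
// sub_fetch/fetch_sub (pre-increment returns the new value, post-increment the old one),
// and += / -= forward to add_fetch / sub_fetch; all of them use sequentially consistent
// ordering.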
#define EASTL_ATOMIC_POINTER_FETCH_INC_DEC_OPERATOR_IMPL(operatorOp, preFuncName, postFuncName) \
    using Base::operator operatorOp; \
    \
    T* operator operatorOp() EA_NOEXCEPT \
    { \
        return preFuncName(1, eastl::memory_order_seq_cst); \
    } \
    \
    T* operator operatorOp(int) EA_NOEXCEPT \
    { \
        return postFuncName(1, eastl::memory_order_seq_cst); \
    }

#define EASTL_ATOMIC_POINTER_FETCH_ASSIGNMENT_OPERATOR_IMPL(operatorOp, funcName) \
    using Base::operator operatorOp; \
    \
    T* operator operatorOp(ptrdiff_t arg) EA_NOEXCEPT \
    { \
        return funcName(arg, eastl::memory_order_seq_cst); \
    }

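// EASTL_ATOMIC_POINTER_WIDTH_SPECIALIZE(bytes, bits) defines the partial specialization
// atomic_pointer_width<T*, bytes>. It derives from atomic_pointer_base<T*, bytes>, uses
// intN_t as the integral type backing the pointer arithmetic, generates the
// fetch_add/add_fetch/fetch_sub/sub_fetch families and the operators built on them, and
// adds a load() overload taking memory_order_read_depends, EASTL's consume-like ordering
// for dependency-ordered loads.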
#define EASTL_ATOMIC_POINTER_WIDTH_SPECIALIZE(bytes, bits) \
    template <typename T> \
    struct atomic_pointer_width<T*, bytes> : public atomic_pointer_base<T*, bytes> \
    { \
    private: \
        \
        using Base = atomic_pointer_base<T*, bytes>; \
        using u_ptr_integral_type = EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(uint, bits), _t); \
        using ptr_integral_type = EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(int, bits), _t); \
        \
    public: /* ctors */ \
        \
        EA_CONSTEXPR atomic_pointer_width(T* desired) EA_NOEXCEPT \
            : Base{ desired } \
        { \
        } \
        \
        EA_CONSTEXPR atomic_pointer_width() EA_NOEXCEPT = default; \
        \
        atomic_pointer_width(const atomic_pointer_width&) EA_NOEXCEPT = delete; \
        \
    public: /* assignment operators */ \
        \
        using Base::operator=; \
        \
        atomic_pointer_width& operator=(const atomic_pointer_width&) EA_NOEXCEPT = delete; \
        atomic_pointer_width& operator=(const atomic_pointer_width&) volatile EA_NOEXCEPT = delete; \
        \
    public: /* fetch_add */ \
        \
        EASTL_ATOMIC_POINTER_FETCH_FUNCS_IMPL(fetch_add, FETCH_ADD, bits) \
        \
    public: /* add_fetch */ \
        \
        EASTL_ATOMIC_POINTER_FETCH_FUNCS_IMPL(add_fetch, ADD_FETCH, bits) \
        \
    public: /* fetch_sub */ \
        \
        EASTL_ATOMIC_POINTER_FETCH_FUNCS_IMPL(fetch_sub, FETCH_SUB, bits) \
        \
    public: /* sub_fetch */ \
        \
        EASTL_ATOMIC_POINTER_FETCH_FUNCS_IMPL(sub_fetch, SUB_FETCH, bits) \
        \
    public: /* operator++ && operator-- */ \
        \
        EASTL_ATOMIC_POINTER_FETCH_INC_DEC_OPERATOR_IMPL(++, add_fetch, fetch_add) \
        \
        EASTL_ATOMIC_POINTER_FETCH_INC_DEC_OPERATOR_IMPL(--, sub_fetch, fetch_sub) \
        \
    public: /* operator+= && operator-= */ \
        \
        EASTL_ATOMIC_POINTER_FETCH_ASSIGNMENT_OPERATOR_IMPL(+=, add_fetch) \
        \
        EASTL_ATOMIC_POINTER_FETCH_ASSIGNMENT_OPERATOR_IMPL(-=, sub_fetch) \
        \
    public: \
        \
        using Base::load; \
        \
        T* load(eastl::internal::memory_order_read_depends_s) EA_NOEXCEPT \
        { \
            T* retPointer; \
            EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_LOAD_READ_DEPENDS_, bits)(T*, retPointer, this->GetAtomicAddress()); \
            return retPointer; \
        } \
    };

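// Only the specialization whose width matches the platform pointer size is instantiated,
// and only when the atomic backend provides operations of that width.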
#if defined(EASTL_ATOMIC_HAS_32BIT) && EA_PLATFORM_PTR_SIZE == 4
    EASTL_ATOMIC_POINTER_WIDTH_SPECIALIZE(4, 32)
#endif

#if defined(EASTL_ATOMIC_HAS_64BIT) && EA_PLATFORM_PTR_SIZE == 8
    EASTL_ATOMIC_POINTER_WIDTH_SPECIALIZE(8, 64)
#endif


} // namespace internal


} // namespace eastl


#include "atomic_pop_compiler_options.h"


#endif /* EASTL_ATOMIC_INTERNAL_POINTER_H */
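
Below is a minimal usage sketch, assuming the public eastl::atomic<T*> wrapper from
<EASTL/atomic.h> is layered on top of the atomic_pointer_width specializations above; the
function and variable names are illustrative only.

#include <EASTL/atomic.h>

static int gValues[8];

void AtomicPointerExample()
{
    eastl::atomic<int*> ptr{ gValues };

    // fetch_add takes an element count: the stored address advances by
    // 4 * sizeof(int) bytes and the previous pointer is returned.
    int* previous = ptr.fetch_add(4, eastl::memory_order_relaxed);

    // ++/-- and +=/-= forward to the fetch functions with seq_cst ordering.
    ++ptr;
    ptr -= 2;

    // read_depends is EASTL's consume-like ordering for dependency-ordered loads.
    int* current = ptr.load(eastl::memory_order_read_depends);

    (void)previous;
    (void)current;
}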