atomic_base_width.h
// Copyright (c) Electronic Arts Inc. All rights reserved.


#ifndef EASTL_ATOMIC_INTERNAL_BASE_WIDTH_H
#define EASTL_ATOMIC_INTERNAL_BASE_WIDTH_H

#if defined(EA_PRAGMA_ONCE_SUPPORTED)
    #pragma once
#endif


#include "atomic_push_compiler_options.h"


namespace eastl
{


namespace internal
{

    template <typename T, unsigned width = sizeof(T)>
    struct atomic_base_width;

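    // The primary template above is only declared, never defined here. The
    // EASTL_ATOMIC_BASE_WIDTH_SPECIALIZE macro at the end of this file provides
    // a full specialization for each object width (1, 2, 4, 8 and, where
    // available, 16 bytes) that the platform reports as natively atomic via the
    // EASTL_ATOMIC_HAS_*BIT macros; a width without such a specialization has
    // no atomic_base_width definition to instantiate.
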
#define EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits) \
    EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_FIXED_WIDTH_TYPE_, bits)


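// The *_FUNC_IMPL helpers below share one pattern: the T-typed operand is
// type-punned to the integer type selected by EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits),
// the call is forwarded to the width-suffixed backend macro named by
// EA_PREPROCESSOR_JOIN(op, bits), and any result is punned back to T before
// being returned.
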
#define EASTL_ATOMIC_STORE_FUNC_IMPL(op, bits) \
    EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits) fixedWidthDesired = EASTL_ATOMIC_TYPE_PUN_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), desired); \
    EA_PREPROCESSOR_JOIN(op, bits)(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), \
                                   EASTL_ATOMIC_TYPE_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), this->GetAtomicAddress()), \
                                   fixedWidthDesired)
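
// As an illustration (assuming EASTL_ATOMIC_FIXED_WIDTH_TYPE_32 names a 32-bit
// unsigned integer type, written as uint32_t here),
// EASTL_ATOMIC_STORE_FUNC_IMPL(EASTL_ATOMIC_STORE_SEQ_CST_, 32) expands roughly to:
//
//     uint32_t fixedWidthDesired = EASTL_ATOMIC_TYPE_PUN_CAST(uint32_t, desired);
//     EASTL_ATOMIC_STORE_SEQ_CST_32(uint32_t,
//                                   EASTL_ATOMIC_TYPE_CAST(uint32_t, this->GetAtomicAddress()),
//                                   fixedWidthDesired);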


#define EASTL_ATOMIC_LOAD_FUNC_IMPL(op, bits) \
    EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits) retVal; \
    EA_PREPROCESSOR_JOIN(op, bits)(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), \
                                   retVal, \
                                   EASTL_ATOMIC_TYPE_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), this->GetAtomicAddress())); \
    return EASTL_ATOMIC_TYPE_PUN_CAST(T, retVal);


#define EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(op, bits) \
    EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits) retVal; \
    EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits) fixedWidthDesired = EASTL_ATOMIC_TYPE_PUN_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), desired); \
    EA_PREPROCESSOR_JOIN(op, bits)(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), \
                                   retVal, \
                                   EASTL_ATOMIC_TYPE_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), this->GetAtomicAddress()), \
                                   fixedWidthDesired); \
    return EASTL_ATOMIC_TYPE_PUN_CAST(T, retVal);


#define EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(op, bits) \
    bool retVal; \
    EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits) fixedWidthDesired = EASTL_ATOMIC_TYPE_PUN_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), desired); \
    EA_PREPROCESSOR_JOIN(op, bits)(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), \
                                   retVal, \
                                   EASTL_ATOMIC_TYPE_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), this->GetAtomicAddress()), \
                                   EASTL_ATOMIC_TYPE_CAST(EASTL_ATOMIC_BASE_FIXED_WIDTH_TYPE(bits), &expected), \
                                   fixedWidthDesired); \
    return retVal;
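
// Unlike the other helpers, the compare-exchange implementation also passes the
// address of the caller's `expected` object to the backend macro, so the
// observed value can be reported back through it per the usual compare_exchange
// contract, and it returns the bool success flag rather than the stored value.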


#define EASTL_ATOMIC_BASE_OP_JOIN(op, Order) \
    EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_, op), Order)
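
// Builds the backend macro name from an operation tag and a memory-order tag:
// EASTL_ATOMIC_BASE_OP_JOIN(CMPXCHG_WEAK, _SEQ_CST_) yields
// EASTL_ATOMIC_CMPXCHG_WEAK_SEQ_CST_, and EA_PREPROCESSOR_JOIN(op, bits) inside
// the *_FUNC_IMPL macros then appends the width, giving e.g.
// EASTL_ATOMIC_CMPXCHG_WEAK_SEQ_CST_32.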


#define EASTL_ATOMIC_BASE_CMPXCHG_FUNCS_IMPL(funcName, cmpxchgOp, bits) \
    using Base::funcName; \
    \
    bool funcName(T& expected, T desired) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _SEQ_CST_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _RELAXED_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_acquire_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _ACQUIRE_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_release_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _RELEASE_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_acq_rel_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _ACQ_REL_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_seq_cst_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _SEQ_CST_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_relaxed_s, \
                  eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _RELAXED_RELAXED_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_acquire_s, \
                  eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _ACQUIRE_RELAXED_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_acquire_s, \
                  eastl::internal::memory_order_acquire_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _ACQUIRE_ACQUIRE_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_release_s, \
                  eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _RELEASE_RELAXED_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_acq_rel_s, \
                  eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _ACQ_REL_RELAXED_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_acq_rel_s, \
                  eastl::internal::memory_order_acquire_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _ACQ_REL_ACQUIRE_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_seq_cst_s, \
                  eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _SEQ_CST_RELAXED_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_seq_cst_s, \
                  eastl::internal::memory_order_acquire_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _SEQ_CST_ACQUIRE_), bits); \
    } \
    \
    bool funcName(T& expected, T desired, \
                  eastl::internal::memory_order_seq_cst_s, \
                  eastl::internal::memory_order_seq_cst_s) EA_NOEXCEPT \
    { \
        EASTL_ATOMIC_CMPXCHG_FUNC_IMPL(EASTL_ATOMIC_BASE_OP_JOIN(cmpxchgOp, _SEQ_CST_SEQ_CST_), bits); \
    }
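
// The macro above stamps out the complete compare_exchange overload set for a
// given width: the implicit seq_cst overload, one overload per explicit single
// memory order, and one overload for each supported success/failure order pair.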

#define EASTL_ATOMIC_BASE_CMPXCHG_WEAK_FUNCS_IMPL(bits) \
    EASTL_ATOMIC_BASE_CMPXCHG_FUNCS_IMPL(compare_exchange_weak, CMPXCHG_WEAK, bits)

#define EASTL_ATOMIC_BASE_CMPXCHG_STRONG_FUNCS_IMPL(bits) \
    EASTL_ATOMIC_BASE_CMPXCHG_FUNCS_IMPL(compare_exchange_strong, CMPXCHG_STRONG, bits)


#define EASTL_ATOMIC_BASE_WIDTH_SPECIALIZE(bytes, bits) \
    template <typename T> \
    struct atomic_base_width<T, bytes> : public atomic_size_aligned<T> \
    { \
    private: \
        \
        static_assert(EA_ALIGN_OF(atomic_size_aligned<T>) == bytes, "eastl::atomic<T> must be sizeof(T) aligned!"); \
        static_assert(EA_ALIGN_OF(atomic_size_aligned<T>) == sizeof(T), "eastl::atomic<T> must be sizeof(T) aligned!"); \
        using Base = atomic_size_aligned<T>; \
        \
    public: /* ctors */ \
        \
        EA_CONSTEXPR atomic_base_width(T desired) EA_NOEXCEPT \
            : Base{ desired } \
        { \
        } \
        \
        EA_CONSTEXPR atomic_base_width() EA_NOEXCEPT_IF(eastl::is_nothrow_default_constructible_v<T>) = default; \
        \
        atomic_base_width(const atomic_base_width&) EA_NOEXCEPT = delete; \
        \
    public: /* store */ \
        \
        using Base::store; \
        \
        void store(T desired) EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_STORE_FUNC_IMPL(EASTL_ATOMIC_STORE_SEQ_CST_, bits); \
        } \
        \
        void store(T desired, eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_STORE_FUNC_IMPL(EASTL_ATOMIC_STORE_RELAXED_, bits); \
        } \
        \
        void store(T desired, eastl::internal::memory_order_release_s) EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_STORE_FUNC_IMPL(EASTL_ATOMIC_STORE_RELEASE_, bits); \
        } \
        \
        void store(T desired, eastl::internal::memory_order_seq_cst_s) EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_STORE_FUNC_IMPL(EASTL_ATOMIC_STORE_SEQ_CST_, bits); \
        } \
        \
    public: /* load */ \
        \
        using Base::load; \
        \
        T load() const EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_LOAD_FUNC_IMPL(EASTL_ATOMIC_LOAD_SEQ_CST_, bits); \
        } \
        \
        T load(eastl::internal::memory_order_relaxed_s) const EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_LOAD_FUNC_IMPL(EASTL_ATOMIC_LOAD_RELAXED_, bits); \
        } \
        \
        T load(eastl::internal::memory_order_acquire_s) const EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_LOAD_FUNC_IMPL(EASTL_ATOMIC_LOAD_ACQUIRE_, bits); \
        } \
        \
        T load(eastl::internal::memory_order_seq_cst_s) const EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_LOAD_FUNC_IMPL(EASTL_ATOMIC_LOAD_SEQ_CST_, bits); \
        } \
        \
    public: /* exchange */ \
        \
        using Base::exchange; \
        \
        T exchange(T desired) EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(EASTL_ATOMIC_EXCHANGE_SEQ_CST_, bits); \
        } \
        \
        T exchange(T desired, eastl::internal::memory_order_relaxed_s) EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(EASTL_ATOMIC_EXCHANGE_RELAXED_, bits); \
        } \
        \
        T exchange(T desired, eastl::internal::memory_order_acquire_s) EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(EASTL_ATOMIC_EXCHANGE_ACQUIRE_, bits); \
        } \
        \
        T exchange(T desired, eastl::internal::memory_order_release_s) EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(EASTL_ATOMIC_EXCHANGE_RELEASE_, bits); \
        } \
        \
        T exchange(T desired, eastl::internal::memory_order_acq_rel_s) EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(EASTL_ATOMIC_EXCHANGE_ACQ_REL_, bits); \
        } \
        \
        T exchange(T desired, eastl::internal::memory_order_seq_cst_s) EA_NOEXCEPT \
        { \
            EASTL_ATOMIC_EXCHANGE_FUNC_IMPL(EASTL_ATOMIC_EXCHANGE_SEQ_CST_, bits); \
        } \
        \
    public: /* compare_exchange_weak */ \
        \
        EASTL_ATOMIC_BASE_CMPXCHG_WEAK_FUNCS_IMPL(bits) \
        \
    public: /* compare_exchange_strong */ \
        \
        EASTL_ATOMIC_BASE_CMPXCHG_STRONG_FUNCS_IMPL(bits) \
        \
    public: /* assignment operator */ \
        \
        using Base::operator=; \
        \
        T operator=(T desired) EA_NOEXCEPT \
        { \
            store(desired, eastl::memory_order_seq_cst); \
            return desired; \
        } \
        \
        atomic_base_width& operator=(const atomic_base_width&) EA_NOEXCEPT = delete; \
        atomic_base_width& operator=(const atomic_base_width&) volatile EA_NOEXCEPT = delete; \
        \
    };
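
// Illustrative only: with EASTL_ATOMIC_HAS_32BIT defined, the 4-byte
// specialization generated below behaves roughly as follows (in practice these
// members are reached through eastl::atomic<T> rather than used directly):
//
//     using Atomic32 = eastl::internal::atomic_base_width<uint32_t, 4>;
//     Atomic32 value{ 0u };
//     value.store(1u);                  // seq_cst store
//     uint32_t observed = value.load(); // seq_cst load
//     uint32_t expected = 1u;
//     bool exchanged = value.compare_exchange_strong(expected, 2u);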


#if defined(EASTL_ATOMIC_HAS_8BIT)
    EASTL_ATOMIC_BASE_WIDTH_SPECIALIZE(1, 8)
#endif

#if defined(EASTL_ATOMIC_HAS_16BIT)
    EASTL_ATOMIC_BASE_WIDTH_SPECIALIZE(2, 16)
#endif

#if defined(EASTL_ATOMIC_HAS_32BIT)
    EASTL_ATOMIC_BASE_WIDTH_SPECIALIZE(4, 32)
#endif

#if defined(EASTL_ATOMIC_HAS_64BIT)
    EASTL_ATOMIC_BASE_WIDTH_SPECIALIZE(8, 64)
#endif

#if defined(EASTL_ATOMIC_HAS_128BIT)
    EASTL_ATOMIC_BASE_WIDTH_SPECIALIZE(16, 128)
#endif
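
// Each specialization is emitted only when the architecture/compiler layer
// advertises native atomic support for that width by defining the matching
// EASTL_ATOMIC_HAS_*BIT capability macro.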


} // namespace internal


} // namespace eastl


#include "atomic_pop_compiler_options.h"


#endif /* EASTL_ATOMIC_INTERNAL_BASE_WIDTH_H */