atomic_size_aligned.h
// Copyright (c) Electronic Arts Inc. All rights reserved.


#ifndef EASTL_ATOMIC_INTERNAL_SIZE_ALIGNED_H
#define EASTL_ATOMIC_INTERNAL_SIZE_ALIGNED_H

#if defined(EA_PRAGMA_ONCE_SUPPORTED)
	#pragma once
#endif


#include "atomic_push_compiler_options.h"


namespace eastl
{


namespace internal
{

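/* Descriptive note (added commentary, not in the upstream header): this macro expands to the
 * full compare-exchange overload set for funcName. Every overload either trips
 * EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER or EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN,
 * so selecting any of them produces a compile-time diagnostic; concrete, size-specialized
 * atomic implementations are expected to provide the real overloads.
 */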
#define EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_IMPL(funcName) \
	template <typename OrderSuccess, typename OrderFailure> \
	bool funcName(T& /*expected*/, T /*desired*/, \
	              OrderSuccess /*orderSuccess*/, \
	              OrderFailure /*orderFailure*/) EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(T); \
		return false; \
	} \
	\
	template <typename OrderSuccess, typename OrderFailure> \
	bool funcName(T& /*expected*/, T /*desired*/, \
	              OrderSuccess /*orderSuccess*/, \
	              OrderFailure /*orderFailure*/) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
		return false; \
	} \
	\
	template <typename Order> \
	bool funcName(T& /*expected*/, T /*desired*/, \
	              Order /*order*/) EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(T); \
		return false; \
	} \
	\
	template <typename Order> \
	bool funcName(T& /*expected*/, T /*desired*/, \
	              Order /*order*/) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
		return false; \
	} \
	\
	bool funcName(T& /*expected*/, T /*desired*/) volatile EA_NOEXCEPT \
	{ \
		EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T); \
		return false; \
	}

#define EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_WEAK_IMPL() \
	EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_IMPL(compare_exchange_weak)

#define EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_STRONG_IMPL() \
	EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_IMPL(compare_exchange_strong)


	template <typename T>
	struct atomic_size_aligned
	{
	public: /* ctors */

		EA_CONSTEXPR atomic_size_aligned(T desired) EA_NOEXCEPT
			: mAtomic{ desired }
		{
		}

		EA_CONSTEXPR atomic_size_aligned() EA_NOEXCEPT_IF(eastl::is_nothrow_default_constructible_v<T>)
			: mAtomic{} /* Value-initialize, which zero-initializes trivially constructible types */
		{
		}

		atomic_size_aligned(const atomic_size_aligned&) EA_NOEXCEPT = delete;

	public: /* store */

		template <typename Order>
		void store(T /*desired*/, Order /*order*/) EA_NOEXCEPT
		{
			EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(T);
		}

		template <typename Order>
		void store(T /*desired*/, Order /*order*/) volatile EA_NOEXCEPT
		{
			EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
		}

		void store(T /*desired*/) volatile EA_NOEXCEPT
		{
			EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
		}

	public: /* load */

		template <typename Order>
		T load(Order /*order*/) const EA_NOEXCEPT
		{
			EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(T);
		}

		template <typename Order>
		T load(Order /*order*/) const volatile EA_NOEXCEPT
		{
			EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
		}

		T load() const volatile EA_NOEXCEPT
		{
			EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
		}

	public: /* exchange */

		template <typename Order>
		T exchange(T /*desired*/, Order /*order*/) EA_NOEXCEPT
		{
			EASTL_ATOMIC_STATIC_ASSERT_INVALID_MEMORY_ORDER(T);
		}

		template <typename Order>
		T exchange(T /*desired*/, Order /*order*/) volatile EA_NOEXCEPT
		{
			EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
		}

		T exchange(T /*desired*/) volatile EA_NOEXCEPT
		{
			EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
		}

	public: /* compare_exchange_weak */

		EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_WEAK_IMPL()

	public: /* compare_exchange_strong */

		EASTL_ATOMIC_SIZE_ALIGNED_STATIC_ASSERT_CMPXCHG_STRONG_IMPL()

	public: /* assignment operator */

		T operator=(T /*desired*/) volatile EA_NOEXCEPT
		{
			EASTL_ATOMIC_STATIC_ASSERT_VOLATILE_MEM_FN(T);
		}

		atomic_size_aligned& operator=(const atomic_size_aligned&)          EA_NOEXCEPT = delete;
		atomic_size_aligned& operator=(const atomic_size_aligned&) volatile EA_NOEXCEPT = delete;

	protected: /* Accessors */

		T* GetAtomicAddress() const EA_NOEXCEPT
		{
			return eastl::addressof(mAtomic);
		}

	private:

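		/* Descriptive note (added commentary, not in the upstream header): the member below is
		 * aligned to sizeof(T) rather than the type's default ABI alignment so that the stored
		 * value never straddles its natural alignment boundary, which hardware atomic
		 * instructions generally require. It is declared mutable so that the const-qualified
		 * GetAtomicAddress() above can hand out a writable T*, since some atomic reads (for
		 * example, ones implemented with a compare-exchange) must be able to write to the
		 * storage.
		 */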
		EA_ALIGN(sizeof(T)) mutable T mAtomic;
	};


} // namespace internal


} // namespace eastl


#include "atomic_pop_compiler_options.h"


#endif /* EASTL_ATOMIC_INTERNAL_SIZE_ALIGNED_H */
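The key detail in this header is that mAtomic is over-aligned to sizeof(T). The standalone sketch below is plain standard C++, not EASTL code, and the name SizeAlignedStorage is invented for illustration; it shows the same size-alignment idea using alignas and checks it with a static_assert, so a 64-bit value gets 8-byte alignment even on ABIs whose default alignment for it is smaller.

#include <cstdint>

// Minimal illustration of size alignment: align the storage to the size of the
// type it holds, mirroring the effect of EA_ALIGN(sizeof(T)) above.
template <typename T>
struct SizeAlignedStorage
{
	alignas(sizeof(T)) T value;
};

// A 64-bit integer is 8-byte aligned inside the wrapper, so it cannot straddle
// its natural alignment boundary (e.g. on 32-bit ABIs that only align it to 4 bytes).
static_assert(alignof(SizeAlignedStorage<std::uint64_t>) >= sizeof(std::uint64_t),
              "size-aligned storage is at least as aligned as it is wide");

int main()
{
	SizeAlignedStorage<std::uint64_t> counter{ 0 };
	return static_cast<int>(counter.value);
}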