// MSVC compiler layer of EASTL's atomic implementation: declares which atomic
// operand widths are supported, the fixed-width operand types, and the macros
// that dispatch operations to the MSVC _Interlocked* intrinsic family.
// NOTE(review): the bare numbers fused at the start of each line below look
// like line-number artifacts from a source-listing extraction, and several
// macro bodies/#endifs are missing — reconcile against the authoritative
// EASTL file before building.
6 #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_H
7 #define EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_H
9 #if defined(EA_PRAGMA_ONCE_SUPPORTED)
// Compiler/intrinsic headers are included between these two guards so that
// warnings from them do not leak into EASTL consumers' builds.
14 EA_DISABLE_ALL_VC_WARNINGS();
17 EA_RESTORE_ALL_VC_WARNINGS();
// Advertise the atomic operand widths this compiler layer implements.
23 #define EASTL_COMPILER_ATOMIC_HAS_8BIT
24 #define EASTL_COMPILER_ATOMIC_HAS_16BIT
25 #define EASTL_COMPILER_ATOMIC_HAS_32BIT
26 #define EASTL_COMPILER_ATOMIC_HAS_64BIT
// 128-bit atomics are only available on 64-bit targets (they are implemented
// via the _InterlockedCompareExchange128 intrinsic used further down).
28 #if EA_PLATFORM_PTR_SIZE == 8
29 #define EASTL_COMPILER_ATOMIC_HAS_128BIT
// Integral type used to type-pun an atomic operand of each width before it is
// handed to an intrinsic. 32-bit deliberately maps to 'long' (32 bits on
// MSVC), which matches the long-based parameter types of the _Interlocked*
// intrinsics.
36 #define EASTL_COMPILER_ATOMIC_FIXED_WIDTH_TYPE_8 char
37 #define EASTL_COMPILER_ATOMIC_FIXED_WIDTH_TYPE_16 short
38 #define EASTL_COMPILER_ATOMIC_FIXED_WIDTH_TYPE_32 long
39 #define EASTL_COMPILER_ATOMIC_FIXED_WIDTH_TYPE_64 __int64
// MSVC has no native 128-bit integer type, so 128-bit operands use an
// EASTL-internal aggregate (declared elsewhere in the library).
56 #define EASTL_COMPILER_ATOMIC_FIXED_WIDTH_TYPE_128 eastl::internal::FixedWidth128
// x86 / x64 dispatch: the plain _Interlocked* intrinsics carry full-barrier
// semantics on these targets, which is presumably why the MemoryOrder
// argument is accepted but never used in any expansion below.
72 #if defined(EA_PROCESSOR_X86) || defined(EA_PROCESSOR_X86_64)
// ret <- value returned by the read-modify-write intrinsic (the _Interlocked*
// fetch-style intrinsics return the previous value at *ptr).
75 #define EASTL_MSVC_ATOMIC_FETCH_OP(ret, ptr, val, MemoryOrder, Intrinsic) \
76 ret = Intrinsic(ptr, val)
// Atomic swap: ret <- previous *ptr, *ptr <- val.
78 #define EASTL_MSVC_ATOMIC_EXCHANGE_OP(ret, ptr, val, MemoryOrder, Intrinsic) \
79 ret = Intrinsic(ptr, val)
// Strong compare-exchange. Note the intrinsic's argument order is
// (destination, exchange, comparand) — the macro parameters are swapped
// accordingly.
81 #define EASTL_MSVC_ATOMIC_CMPXCHG_STRONG_OP(ret, ptr, comparand, exchange, MemoryOrder, Intrinsic) \
82 ret = Intrinsic(ptr, exchange, comparand)
// 128-bit strong compare-exchange. _InterlockedCompareExchange128_np returns
// nonzero on success and writes the value observed in memory back through
// 'comparandResult' — TODO(review): confirm against the MSVC intrinsic docs.
84 #define EASTL_MSVC_ATOMIC_CMPXCHG_STRONG_128_OP(ret, ptr, comparandResult, exchangeHigh, exchangeLow, MemoryOrder) \
85 ret = _InterlockedCompareExchange128_np(ptr, exchangeHigh, exchangeLow, comparandResult)
88 #elif defined(EA_PROCESSOR_ARM32) || defined(EA_PROCESSOR_ARM64)
// ARM32 / ARM64 dispatch: MSVC exposes per-memory-order intrinsic variants
// through a name suffix, and these helpers token-paste that suffix onto the
// base intrinsic name:
//   _nf  ("no fence")  -> relaxed
//   _acq               -> acquire
//   _rel               -> release
91 #define EASTL_MSVC_INTRINSIC_RELAXED(Intrinsic) \
92 EA_PREPROCESSOR_JOIN(Intrinsic, _nf)
94 #define EASTL_MSVC_INTRINSIC_ACQUIRE(Intrinsic) \
95 EA_PREPROCESSOR_JOIN(Intrinsic, _acq)
97 #define EASTL_MSVC_INTRINSIC_RELEASE(Intrinsic) \
98 EA_PREPROCESSOR_JOIN(Intrinsic, _rel)
// ACQ_REL and SEQ_CST presumably map to the unsuffixed, full-barrier form of
// the intrinsic; their replacement lists are not visible in this chunk —
// TODO(review): confirm against the authoritative file.
100 #define EASTL_MSVC_INTRINSIC_ACQ_REL(Intrinsic) \
103 #define EASTL_MSVC_INTRINSIC_SEQ_CST(Intrinsic) \
107 #define EASTL_MSVC_ATOMIC_FETCH_OP(ret, ptr, val, MemoryOrder, Intrinsic) \
108 ret = EA_PREPROCESSOR_JOIN(EASTL_MSVC_INTRINSIC_, MemoryOrder)(Intrinsic)(ptr, val)
// Same shape as the x86 versions, except the MemoryOrder token is pasted onto
// EASTL_MSVC_INTRINSIC_ to select the correctly-suffixed intrinsic variant.
110 #define EASTL_MSVC_ATOMIC_EXCHANGE_OP(ret, ptr, val, MemoryOrder, Intrinsic) \
111 ret = EA_PREPROCESSOR_JOIN(EASTL_MSVC_INTRINSIC_, MemoryOrder)(Intrinsic)(ptr, val)
// Strong compare-exchange; intrinsic argument order is
// (destination, exchange, comparand).
113 #define EASTL_MSVC_ATOMIC_CMPXCHG_STRONG_OP(ret, ptr, comparand, exchange, MemoryOrder, Intrinsic) \
114 ret = EA_PREPROCESSOR_JOIN(EASTL_MSVC_INTRINSIC_, MemoryOrder)(Intrinsic)(ptr, exchange, comparand)
// 128-bit variant: the memory-order suffix is applied directly to
// _InterlockedCompareExchange128.
116 #define EASTL_MSVC_ATOMIC_CMPXCHG_STRONG_128_OP(ret, ptr, comparandResult, exchangeHigh, exchangeLow, MemoryOrder) \
117 ret = EA_PREPROCESSOR_JOIN(EASTL_MSVC_INTRINSIC_, MemoryOrder)(_InterlockedCompareExchange128)(ptr, exchangeHigh, exchangeLow, comparandResult)
// No-op post-compute hook: used when the caller wants the fetched (pre-op)
// value returned unmodified (see EASTL_MSVC_ATOMIC_FETCH_OP_N below).
126 #define EASTL_MSVC_NOP_POST_INTRIN_COMPUTE(ret, lhs, rhs)
// The four implementation macros below share one pattern:
//   1. type-pun the typed operand(s) to the matching integral width,
//   2. invoke the architecture-specific *_OP dispatch macro,
//   3. type-pun the intrinsic's result back to the caller's type.
// FETCH_INTRIN_N additionally runs PRE_INTRIN_COMPUTE on the input value
// before the intrinsic and POST_INTRIN_COMPUTE on the result after it, so
// op-fetch variants (which must return the post-op value) can reuse it.
// CMPXCHG_STRONG_INTRIN_N compares the intrinsic's returned old value against
// the comparand to produce success/failure; on failure the observed value is
// presumably written back through *expected (the branch body is elided in
// this chunk — TODO(review): confirm).
// CMPXCHG_STRONG_INTRIN_128 splits 'desired' into two __int64 halves through
// a union pun and converts the intrinsic's unsigned char result to bool.
// NOTE(review): 'cmpxchgRetChar = EASTL_MSVC_ATOMIC_CMPXCHG_STRONG_128_OP(
// cmpxchgRetChar, ...)' double-assigns, since the *_128_OP macro already
// assigns to its first argument — harmless but redundant; confirm upstream.
// Every continuation line below belongs to a macro body, so no comments may
// be interleaved past this point.
128 #define EASTL_MSVC_NOP_PRE_INTRIN_COMPUTE(ret, val) \
132 #define EASTL_MSVC_ATOMIC_FETCH_INTRIN_N(integralType, fetchIntrinsic, type, ret, ptr, val, MemoryOrder, PRE_INTRIN_COMPUTE, POST_INTRIN_COMPUTE) \
134 integralType retIntegral; \
137 PRE_INTRIN_COMPUTE(valCompute, (val)); \
138 const integralType valIntegral = EASTL_ATOMIC_TYPE_PUN_CAST(integralType, valCompute); \
140 EASTL_MSVC_ATOMIC_FETCH_OP(retIntegral, EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), \
141 valIntegral, MemoryOrder, fetchIntrinsic); \
143 ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \
144 POST_INTRIN_COMPUTE(ret, ret, (val)); \
147 #define EASTL_MSVC_ATOMIC_EXCHANGE_INTRIN_N(integralType, exchangeIntrinsic, type, ret, ptr, val, MemoryOrder) \
149 integralType retIntegral; \
150 EASTL_MSVC_ATOMIC_EXCHANGE_OP(retIntegral, EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), \
151 EASTL_ATOMIC_TYPE_PUN_CAST(integralType, (val)), MemoryOrder, \
152 exchangeIntrinsic); \
154 ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \
157 #define EASTL_MSVC_ATOMIC_CMPXCHG_STRONG_INTRIN_N(integralType, cmpxchgStrongIntrinsic, type, ret, ptr, expected, desired, MemoryOrder) \
159 integralType comparandIntegral = EASTL_ATOMIC_TYPE_PUN_CAST(integralType, *(expected)); \
160 integralType oldIntegral; \
161 EASTL_MSVC_ATOMIC_CMPXCHG_STRONG_OP(oldIntegral, EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), \
162 comparandIntegral, EASTL_ATOMIC_TYPE_PUN_CAST(integralType, (desired)), \
163 MemoryOrder, cmpxchgStrongIntrinsic); \
165 if (oldIntegral == comparandIntegral) \
171 *(expected) = EASTL_ATOMIC_TYPE_PUN_CAST(type, oldIntegral); \
186 #define EASTL_MSVC_ATOMIC_CMPXCHG_STRONG_INTRIN_128(type, ret, ptr, expected, desired, MemoryOrder) \
197 struct exchange128 exchangePun; \
200 union TypePun typePun = { (desired) }; \
202 unsigned char cmpxchgRetChar; \
203 cmpxchgRetChar = EASTL_MSVC_ATOMIC_CMPXCHG_STRONG_128_OP(cmpxchgRetChar, EASTL_ATOMIC_VOLATILE_TYPE_CAST(__int64, (ptr)), \
204 EASTL_ATOMIC_TYPE_CAST(__int64, (expected)), \
205 typePun.exchangePun.value[1], typePun.exchangePun.value[0], \
208 ret = static_cast<bool>(cmpxchgRetChar); \
// Public dispatch wrappers consumed by the per-operation headers included
// below. FETCH_OP_N returns the pre-op (fetched) value: the post-compute
// hook is pinned to the no-op.
215 #define EASTL_MSVC_ATOMIC_FETCH_OP_N(integralType, fetchIntrinsic, type, ret, ptr, val, MemoryOrder, PRE_INTRIN_COMPUTE) \
216 EASTL_MSVC_ATOMIC_FETCH_INTRIN_N(integralType, fetchIntrinsic, type, ret, ptr, val, MemoryOrder, PRE_INTRIN_COMPUTE, EASTL_MSVC_NOP_POST_INTRIN_COMPUTE)
// OP_FETCH_N additionally lets the caller supply POST_INTRIN_COMPUTE, which
// turns the fetched value into the post-op result (e.g. for add_fetch).
218 #define EASTL_MSVC_ATOMIC_OP_FETCH_N(integralType, fetchIntrinsic, type, ret, ptr, val, MemoryOrder, PRE_INTRIN_COMPUTE, POST_INTRIN_COMPUTE) \
219 EASTL_MSVC_ATOMIC_FETCH_INTRIN_N(integralType, fetchIntrinsic, type, ret, ptr, val, MemoryOrder, PRE_INTRIN_COMPUTE, POST_INTRIN_COMPUTE)
// Plain forwarder for atomic exchange.
221 #define EASTL_MSVC_ATOMIC_EXCHANGE_OP_N(integralType, exchangeIntrinsic, type, ret, ptr, val, MemoryOrder) \
222 EASTL_MSVC_ATOMIC_EXCHANGE_INTRIN_N(integralType, exchangeIntrinsic, type, ret, ptr, val, MemoryOrder)
// Plain forwarder for strong compare-exchange (1..8 byte operands).
224 #define EASTL_MSVC_ATOMIC_CMPXCHG_STRONG_OP_N(integralType, cmpxchgStrongIntrinsic, type, ret, ptr, expected, desired, MemoryOrder) \
225 EASTL_MSVC_ATOMIC_CMPXCHG_STRONG_INTRIN_N(integralType, cmpxchgStrongIntrinsic, type, ret, ptr, expected, desired, MemoryOrder)
// Plain forwarder for the 128-bit strong compare-exchange.
227 #define EASTL_MSVC_ATOMIC_CMPXCHG_STRONG_OP_128(type, ret, ptr, expected, desired, MemoryOrder) \
228 EASTL_MSVC_ATOMIC_CMPXCHG_STRONG_INTRIN_128(type, ret, ptr, expected, desired, MemoryOrder)
// Per-operation implementations, each built on the dispatch macros above.
// fetch_* operations return the value *before* the operation:
234 #include "compiler_msvc_fetch_add.h"
235 #include "compiler_msvc_fetch_sub.h"
237 #include "compiler_msvc_fetch_and.h"
238 #include "compiler_msvc_fetch_xor.h"
239 #include "compiler_msvc_fetch_or.h"
// *_fetch operations return the value *after* the operation:
241 #include "compiler_msvc_add_fetch.h"
242 #include "compiler_msvc_sub_fetch.h"
244 #include "compiler_msvc_and_fetch.h"
245 #include "compiler_msvc_xor_fetch.h"
246 #include "compiler_msvc_or_fetch.h"
// Exchange and compare-exchange:
248 #include "compiler_msvc_exchange.h"
250 #include "compiler_msvc_cmpxchg_weak.h"
251 #include "compiler_msvc_cmpxchg_strong.h"
// Fences and CPU utility operations:
253 #include "compiler_msvc_barrier.h"
255 #include "compiler_msvc_cpu_pause.h"
257 #include "compiler_msvc_signal_fence.h"
EA Standard Template Library.
Definition: algorithm.h:288
Definition: compiler_msvc.h:48