6 #ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_STORE_H
7 #define EASTL_ATOMIC_INTERNAL_ARCH_X86_STORE_H
9 #if defined(EA_PRAGMA_ONCE_SUPPORTED)
18 #if defined(EA_COMPILER_MSVC)
#if defined(EA_COMPILER_MSVC) && (EA_COMPILER_VERSION >= 1920) // >= VS2019

	// VS2019+ provides the __iso_volatile_store{8,16,32,64} intrinsics: a plain
	// volatile store that the optimizer will not elide or reorder at compile
	// time. NOTE(review): presumably chosen to avoid the implicit fences of
	// /volatile:ms — confirm against the MSVC intrinsic docs.
	#define EASTL_ARCH_ATOMIC_X86_STORE_N(integralType, bits, type, ptr, val) \
		EA_PREPROCESSOR_JOIN(__iso_volatile_store, bits)(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), EASTL_ATOMIC_TYPE_PUN_CAST(integralType, (val)))

#else

	// Older MSVC: type-pun the value to the same-width integral type and do a
	// direct store through a volatile-qualified punned pointer.
	#define EASTL_ARCH_ATOMIC_X86_STORE_N(integralType, bits, type, ptr, val) \
		{ \
			integralType valIntegral = EASTL_ATOMIC_TYPE_PUN_CAST(integralType, (val)); \
			\
			(*(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)))) = valIntegral; \
		}

#endif
38 #define EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, MemoryOrder) \
40 type exchange128; EA_UNUSED(exchange128); \
41 EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_EXCHANGE_, MemoryOrder), _128)(type, exchange128, ptr, val); \
45 #define EASTL_ARCH_ATOMIC_X86_STORE_8(type, ptr, val) \
46 EASTL_ARCH_ATOMIC_X86_STORE_N(__int8, 8, type, ptr, val)
48 #define EASTL_ARCH_ATOMIC_X86_STORE_16(type, ptr, val) \
49 EASTL_ARCH_ATOMIC_X86_STORE_N(__int16, 16, type, ptr, val)
51 #define EASTL_ARCH_ATOMIC_X86_STORE_32(type, ptr, val) \
52 EASTL_ARCH_ATOMIC_X86_STORE_N(__int32, 32, type, ptr, val)
54 #define EASTL_ARCH_ATOMIC_X86_STORE_64(type, ptr, val) \
55 EASTL_ARCH_ATOMIC_X86_STORE_N(__int64, 64, type, ptr, val)
58 #define EASTL_ARCH_ATOMIC_STORE_RELAXED_8(type, ptr, val) \
59 EASTL_ARCH_ATOMIC_X86_STORE_8(type, ptr, val)
61 #define EASTL_ARCH_ATOMIC_STORE_RELAXED_16(type, ptr, val) \
62 EASTL_ARCH_ATOMIC_X86_STORE_16(type, ptr, val)
64 #define EASTL_ARCH_ATOMIC_STORE_RELAXED_32(type, ptr, val) \
65 EASTL_ARCH_ATOMIC_X86_STORE_32(type, ptr, val)
67 #define EASTL_ARCH_ATOMIC_STORE_RELAXED_64(type, ptr, val) \
68 EASTL_ARCH_ATOMIC_X86_STORE_64(type, ptr, val)
70 #define EASTL_ARCH_ATOMIC_STORE_RELAXED_128(type, ptr, val) \
71 EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, RELAXED)
74 #define EASTL_ARCH_ATOMIC_STORE_RELEASE_8(type, ptr, val) \
75 EASTL_ATOMIC_COMPILER_BARRIER(); \
76 EASTL_ARCH_ATOMIC_X86_STORE_8(type, ptr, val)
78 #define EASTL_ARCH_ATOMIC_STORE_RELEASE_16(type, ptr, val) \
79 EASTL_ATOMIC_COMPILER_BARRIER(); \
80 EASTL_ARCH_ATOMIC_X86_STORE_16(type, ptr, val)
82 #define EASTL_ARCH_ATOMIC_STORE_RELEASE_32(type, ptr, val) \
83 EASTL_ATOMIC_COMPILER_BARRIER(); \
84 EASTL_ARCH_ATOMIC_X86_STORE_32(type, ptr, val)
86 #define EASTL_ARCH_ATOMIC_STORE_RELEASE_64(type, ptr, val) \
87 EASTL_ATOMIC_COMPILER_BARRIER(); \
88 EASTL_ARCH_ATOMIC_X86_STORE_64(type, ptr, val)
90 #define EASTL_ARCH_ATOMIC_STORE_RELEASE_128(type, ptr, val) \
91 EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, RELEASE)
94 #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_8(type, ptr, val) \
96 type exchange8; EA_UNUSED(exchange8); \
97 EASTL_ATOMIC_EXCHANGE_SEQ_CST_8(type, exchange8, ptr, val); \
100 #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_16(type, ptr, val) \
102 type exchange16; EA_UNUSED(exchange16); \
103 EASTL_ATOMIC_EXCHANGE_SEQ_CST_16(type, exchange16, ptr, val); \
106 #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_32(type, ptr, val) \
108 type exchange32; EA_UNUSED(exchange32); \
109 EASTL_ATOMIC_EXCHANGE_SEQ_CST_32(type, exchange32, ptr, val); \
#if defined(EA_PROCESSOR_X86)

	// 32-bit x86: perform the 64-bit store, then issue a full CPU memory
	// barrier for seq-cst ordering; the compiler barrier first stops earlier
	// accesses from being sunk past the store at compile time.
	#define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64(type, ptr, val) \
		EASTL_ATOMIC_COMPILER_BARRIER(); \
		EASTL_ARCH_ATOMIC_X86_STORE_64(type, ptr, val); \
		EASTL_ATOMIC_CPU_MB()

#elif defined(EA_PROCESSOR_X86_64)

	// x86-64: use the seq-cst 64-bit exchange and discard the previous value.
	#define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64(type, ptr, val) \
		{ \
			type exchange64; EA_UNUSED(exchange64); \
			EASTL_ATOMIC_EXCHANGE_SEQ_CST_64(type, exchange64, ptr, val); \
		}

#endif
141 #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128(type, ptr, val) \
142 EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, SEQ_CST)
148 #if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64))
151 #define EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, MemoryOrder) \
153 type exchange128; EA_UNUSED(exchange128); \
154 EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_EXCHANGE_, MemoryOrder), _128)(type, exchange128, ptr, val); \
158 #define EASTL_ARCH_ATOMIC_STORE_RELAXED_128(type, ptr, val) \
159 EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, RELAXED)
161 #define EASTL_ARCH_ATOMIC_STORE_RELEASE_128(type, ptr, val) \
162 EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, RELEASE)
164 #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128(type, ptr, val) \
165 EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, SEQ_CST)