// arch_x86_store.h — EASTL internal atomics: x86/x64 atomic store implementations.
1 // Copyright (c) Electronic Arts Inc. All rights reserved.
4 
5 
6 #ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_STORE_H
7 #define EASTL_ATOMIC_INTERNAL_ARCH_X86_STORE_H
8 
9 #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10  #pragma once
11 #endif
12 
13 
15 //
16 // void EASTL_ARCH_ATOMIC_STORE_*_N(type, type * ptr, type val)
17 //
18 #if defined(EA_COMPILER_MSVC)
19 
20 
	// Native-width (8/16/32/64-bit) store with no added ordering (relaxed).
	//
	// VS2019+ provides the __iso_volatile_store{8,16,32,64} intrinsics: a plain
	// volatile store that is exempt from /volatile:ms acquire/release semantics,
	// i.e. exactly one store instruction with no extra fencing.
	#if defined(EA_COMPILER_MSVC) && (EA_COMPILER_VERSION >= 1920) // >= VS2019

		#define EASTL_ARCH_ATOMIC_X86_STORE_N(integralType, bits, type, ptr, val) \
			EA_PREPROCESSOR_JOIN(__iso_volatile_store, bits)(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), EASTL_ATOMIC_TYPE_PUN_CAST(integralType, (val)))

	#else

		// Pre-VS2019 fallback: type-pun the value to the same-width integral type
		// and write it through a volatile-qualified pointer so the compiler cannot
		// elide or combine the store.
		// NOTE(review): under /volatile:ms this volatile store may additionally get
		// release semantics from the compiler — stronger than the intrinsic path,
		// but not weaker, so the mapping remains correct.
		#define EASTL_ARCH_ATOMIC_X86_STORE_N(integralType, bits, type, ptr, val) \
			{ \
				integralType valIntegral = EASTL_ATOMIC_TYPE_PUN_CAST(integralType, (val)); \
				\
				(*(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)))) = valIntegral; \
			}

	#endif
36 
37 
	// 128-bit store (MSVC): x64 has no single plain 16-byte atomic store
	// instruction this code can rely on, so the store is implemented on top of
	// the 128-bit atomic exchange and the previous value is discarded.
	// MemoryOrder (RELAXED/RELEASE/SEQ_CST) is token-pasted to select
	// EASTL_ATOMIC_EXCHANGE_<MemoryOrder>_128, defined elsewhere.
	#define EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, MemoryOrder) \
		{ \
			type exchange128; EA_UNUSED(exchange128); \
			EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_EXCHANGE_, MemoryOrder), _128)(type, exchange128, ptr, val); \
		}
43 
44 
	// Width-specific wrappers: bind each bit width to its MSVC integral type for
	// the generic STORE_N above.
	#define EASTL_ARCH_ATOMIC_X86_STORE_8(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_N(__int8, 8, type, ptr, val)

	#define EASTL_ARCH_ATOMIC_X86_STORE_16(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_N(__int16, 16, type, ptr, val)

	#define EASTL_ARCH_ATOMIC_X86_STORE_32(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_N(__int32, 32, type, ptr, val)

	#define EASTL_ARCH_ATOMIC_X86_STORE_64(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_N(__int64, 64, type, ptr, val)
56 
57 
	// Relaxed stores: a plain native store suffices on x86; no compiler or
	// hardware barrier is added.
	#define EASTL_ARCH_ATOMIC_STORE_RELAXED_8(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_8(type, ptr, val)

	#define EASTL_ARCH_ATOMIC_STORE_RELAXED_16(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_16(type, ptr, val)

	#define EASTL_ARCH_ATOMIC_STORE_RELAXED_32(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_32(type, ptr, val)

	#define EASTL_ARCH_ATOMIC_STORE_RELAXED_64(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_64(type, ptr, val)

	// 128-bit relaxed store still routes through the exchange-based helper
	// (there is no plain 16-byte atomic store), just with relaxed ordering.
	#define EASTL_ARCH_ATOMIC_STORE_RELAXED_128(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, RELAXED)
72 
73 
	// Release stores: a compiler barrier before the plain store prevents the
	// compiler from sinking earlier memory operations past it. No hardware
	// fence is emitted — x86's strong ordering (stores are not reordered with
	// earlier loads or stores) already gives a plain store release semantics.
	//
	// NOTE(review): each of these expands to TWO statements; they are not safe
	// inside an unbraced `if`. This matches the file-wide convention.
	#define EASTL_ARCH_ATOMIC_STORE_RELEASE_8(type, ptr, val) \
		EASTL_ATOMIC_COMPILER_BARRIER(); \
		EASTL_ARCH_ATOMIC_X86_STORE_8(type, ptr, val)

	#define EASTL_ARCH_ATOMIC_STORE_RELEASE_16(type, ptr, val) \
		EASTL_ATOMIC_COMPILER_BARRIER(); \
		EASTL_ARCH_ATOMIC_X86_STORE_16(type, ptr, val)

	#define EASTL_ARCH_ATOMIC_STORE_RELEASE_32(type, ptr, val) \
		EASTL_ATOMIC_COMPILER_BARRIER(); \
		EASTL_ARCH_ATOMIC_X86_STORE_32(type, ptr, val)

	#define EASTL_ARCH_ATOMIC_STORE_RELEASE_64(type, ptr, val) \
		EASTL_ATOMIC_COMPILER_BARRIER(); \
		EASTL_ARCH_ATOMIC_X86_STORE_64(type, ptr, val)

	// 128-bit release store delegates to the exchange-based helper, which
	// carries the ordering itself.
	#define EASTL_ARCH_ATOMIC_STORE_RELEASE_128(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, RELEASE)
92 
93 
	// Seq-cst stores (8/16/32-bit): a sequentially-consistent store on x86
	// needs a full barrier, so it is implemented as an atomic exchange whose
	// previous value is discarded (presumably lowering to a locked XCHG in the
	// EXCHANGE macros defined elsewhere — confirm against the exchange header).
	#define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_8(type, ptr, val) \
		{ \
			type exchange8; EA_UNUSED(exchange8); \
			EASTL_ATOMIC_EXCHANGE_SEQ_CST_8(type, exchange8, ptr, val); \
		}

	#define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_16(type, ptr, val) \
		{ \
			type exchange16; EA_UNUSED(exchange16); \
			EASTL_ATOMIC_EXCHANGE_SEQ_CST_16(type, exchange16, ptr, val); \
		}

	#define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_32(type, ptr, val) \
		{ \
			type exchange32; EA_UNUSED(exchange32); \
			EASTL_ATOMIC_EXCHANGE_SEQ_CST_32(type, exchange32, ptr, val); \
		}
111 
112 
	#if defined(EA_PROCESSOR_X86)


		// 32-bit x86: the 64-bit seq-cst store is a plain 64-bit store bracketed
		// by a compiler barrier (stops the compiler sinking prior writes) and a
		// full hardware fence afterwards (gives the store seq-cst visibility).
		// NOTE(review): this assumes the 64-bit plain store itself is atomic on
		// 32-bit x86 — confirm against the 32-bit expansion of
		// EASTL_ARCH_ATOMIC_X86_STORE_64. Also expands to multiple statements;
		// not safe in an unbraced `if` (file-wide convention).
		#define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64(type, ptr, val) \
			EASTL_ATOMIC_COMPILER_BARRIER(); \
			EASTL_ARCH_ATOMIC_X86_STORE_64(type, ptr, val); \
			EASTL_ATOMIC_CPU_MB()


	#elif defined(EA_PROCESSOR_X86_64)


		// 64-bit x86: same exchange-and-discard pattern as the narrower
		// seq-cst stores above.
		#define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64(type, ptr, val) \
			{ \
				type exchange64; EA_UNUSED(exchange64); \
				EASTL_ATOMIC_EXCHANGE_SEQ_CST_64(type, exchange64, ptr, val); \
			}


	#endif


	// 128-bit seq-cst store delegates to the exchange-based helper.
	#define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, SEQ_CST)
143 
144 
145 #endif
146 
147 
#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64))


	// GCC/Clang x86-64: as on MSVC, there is no plain 16-byte atomic store, so
	// a 128-bit store is an atomic exchange whose result is discarded.
	// MemoryOrder token-pastes to EASTL_ATOMIC_EXCHANGE_<MemoryOrder>_128,
	// defined elsewhere.
	#define EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, MemoryOrder) \
		{ \
			type exchange128; EA_UNUSED(exchange128); \
			EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_EXCHANGE_, MemoryOrder), _128)(type, exchange128, ptr, val); \
		}


	// Only the 128-bit stores need arch-level definitions on GCC/Clang; the
	// narrower widths presumably map to compiler builtins elsewhere — the
	// non-128-bit macros are intentionally absent here.
	#define EASTL_ARCH_ATOMIC_STORE_RELAXED_128(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, RELAXED)

	#define EASTL_ARCH_ATOMIC_STORE_RELEASE_128(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, RELEASE)

	#define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128(type, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, SEQ_CST)


#endif
169 
170 
171 #endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_STORE_H */