#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_LOAD_H
#define EASTL_ATOMIC_INTERNAL_ARCH_X86_LOAD_H

#if defined(EA_PRAGMA_ONCE_SUPPORTED)
	#pragma once
#endif

#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64))
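
	// NOTE: x86-64 has no guaranteed-atomic plain 16-byte load instruction,
	// so the 128-bit load below is synthesized with lock cmpxchg16b: the
	// compare of RDX:RAX against memory either rewrites the same bytes on a
	// match or deposits the current value into RDX:RAX on a mismatch, so ret
	// always ends up holding an atomic snapshot of *ptr. Since the lock
	// prefix is already a full barrier, RELAXED, ACQUIRE, and SEQ_CST all
	// lower to the same instruction sequence and MemoryOrder is unused here.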
	#define EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, MemoryOrder) \
		{ \
			EASTL_ATOMIC_FIXED_WIDTH_TYPE_128 expected = 0; \
			ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, expected); \
			\
			/* Compare RDX:RAX with m128. If equal, set ZF and store RCX:RBX into m128. Else clear ZF and load m128 into RDX:RAX. */ \
			__asm__ __volatile__ ("lock; cmpxchg16b %2" \
								  : "=a"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(ret)))[0]), "=d"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(ret)))[1]), \
									"+m"(*(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(__uint128_t, (ptr)))) \
								  : "b"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(ret)))[0]), "c"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(ret)))[1]), \
									"a"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(ret)))[0]), "d"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(ret)))[1]) \
								  : "cc" /* cmpxchg16b sets/clears ZF */); \
		}

	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_128(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, RELAXED)

	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_128(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, ACQUIRE)

	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_128(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, SEQ_CST)
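
	// Usage sketch (hypothetical caller; the real entry points are the
	// generic EASTL_ATOMIC_LOAD_* macros layered above this header):
	//
	//   __uint128_t loaded;
	//   EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_128(__uint128_t, loaded, sharedPtr);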

#elif defined(EA_COMPILER_MSVC)


	#if defined(EA_COMPILER_MSVC) && (EA_COMPILER_VERSION >= 1920)
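
		// MSVC 1920 corresponds to VS2019, which is assumed here to be the
		// first version exposing the __iso_volatile_load{8,16,32,64}
		// intrinsics on x86/x64: a plain volatile load with ISO semantics
		// that the compiler will neither elide nor tear, and that emits no
		// hardware fence regardless of the /volatile:ms setting.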
		#define EASTL_ARCH_ATOMIC_X86_LOAD_N(integralType, bits, type, ret, ptr) \
			{ \
				integralType retIntegral; \
				retIntegral = EA_PREPROCESSOR_JOIN(__iso_volatile_load, bits)(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr))); \
				\
				ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \
			}

	#else

		/* Pre-VS2019 fallback: a plain volatile read; aligned loads up to 8 bytes are atomic on x86. */
		#define EASTL_ARCH_ATOMIC_X86_LOAD_N(integralType, bits, type, ret, ptr) \
			{ \
				integralType retIntegral; \
				retIntegral = (*(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)))); \
				\
				ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \
			}

	#endif
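
	// MSVC has no 128-bit load intrinsic, so the load is synthesized from the
	// strong 16-byte compare-exchange (lock cmpxchg16b underneath): comparing
	// *ptr against ret and passing ret as the desired value either rewrites
	// the same bytes or fails and hands back the current value, so ret always
	// holds an atomic snapshot; the success flag is deliberately ignored.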
	#define EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, MemoryOrder) \
		{ \
			EASTL_ATOMIC_FIXED_WIDTH_TYPE_128 expected{0, 0}; \
			ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, expected); \
			\
			bool cmpxchgRetBool; EA_UNUSED(cmpxchgRetBool); \
			EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_CMPXCHG_STRONG_, MemoryOrder), _128)(type, cmpxchgRetBool, ptr, &(ret), ret); \
		}

	#define EASTL_ARCH_ATOMIC_X86_LOAD_8(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_N(__int8, 8, type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_X86_LOAD_16(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_N(__int16, 16, type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_X86_LOAD_32(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_N(__int32, 32, type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_X86_LOAD_64(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_N(__int64, 64, type, ret, ptr)
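
	// Relaxed loads need no fence of any kind: under the x86-TSO model an
	// aligned load of up to 8 bytes is a single instruction and already atomic.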
	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_8(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_8(type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_16(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_16(type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_32(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_32(type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_64(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_64(type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_128(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, RELAXED)
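
	// Every x86 load already carries acquire semantics in hardware; only the
	// compiler may reorder later accesses before it, so acquire loads are the
	// plain load followed by a compiler-only barrier.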
	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_8(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_8(type, ret, ptr); \
		EASTL_ATOMIC_COMPILER_BARRIER()

	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_16(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_16(type, ret, ptr); \
		EASTL_ATOMIC_COMPILER_BARRIER()

	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_32(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_32(type, ret, ptr); \
		EASTL_ATOMIC_COMPILER_BARRIER()

	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_64(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_64(type, ret, ptr); \
		EASTL_ATOMIC_COMPILER_BARRIER()

	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_128(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, ACQUIRE)
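
	// Sequentially consistent loads are also fence-free on x86: the standard
	// mapping places the full barrier on the seq_cst store side (xchg or
	// mfence), leaving the load side identical to acquire.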
	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_8(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_8(type, ret, ptr); \
		EASTL_ATOMIC_COMPILER_BARRIER()

	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_16(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_16(type, ret, ptr); \
		EASTL_ATOMIC_COMPILER_BARRIER()

	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_32(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_32(type, ret, ptr); \
		EASTL_ATOMIC_COMPILER_BARRIER()

	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_64(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_64(type, ret, ptr); \
		EASTL_ATOMIC_COMPILER_BARRIER()

	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_128(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, SEQ_CST)


#endif // compiler/architecture selection


#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_LOAD_H */