#ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_LOAD_H
#define EASTL_ATOMIC_INTERNAL_ARCH_ARM_LOAD_H

#if defined(EA_PRAGMA_ONCE_SUPPORTED)
	#pragma once
#endif
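
/////////////////////////////////////////////////////////////////////////////////
//
// void EASTL_ARCH_ATOMIC_LOAD_*_N(type, type ret, type * ptr)
//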
#if defined(EA_COMPILER_MSVC)
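
	/**
	 * NOTE:
	 *
	 * Even 8-byte aligned 64-bit memory accesses on ARM32 are not
	 * guaranteed to be atomic on all ARM32 cpus. Single-copy atomicity
	 * is only guaranteed on cpus with the LPAE extension, so we must use
	 * an ldrexd instruction to ensure no load shearing is observed on
	 * all ARM32 cpus.
	 */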
	#if defined(EA_PROCESSOR_ARM32)

		#define EASTL_ARCH_ATOMIC_ARM32_LDREXD(ret, ptr) \
			ret = __ldrexd((ptr))

	#endif
	#define EASTL_ARCH_ATOMIC_ARM_LOAD_N(integralType, bits, type, ret, ptr) \
		{ \
			integralType retIntegral; \
			retIntegral = EA_PREPROCESSOR_JOIN(__iso_volatile_load, bits)(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr))); \
			\
			ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \
		}
	#define EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int8, 8, type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int16, 16, type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int32, 32, type, ret, ptr)
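
	/**
	 * For illustration, EASTL_ARCH_ATOMIC_ARM_LOAD_32(float, ret, ptr)
	 * roughly expands to the following sketch; the exact casts are
	 * produced by the EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST and
	 * EASTL_ATOMIC_TYPE_PUN_CAST helper macros:
	 *
	 *   {
	 *       __int32 retIntegral;
	 *       retIntegral = __iso_volatile_load32((const volatile __int32*)(ptr));
	 *       ret = *reinterpret_cast<float*>(&retIntegral);
	 *   }
	 *
	 * __iso_volatile_load32 is the MSVC intrinsic for a raw volatile
	 * load that the compiler may not fuse, split, or optimize away.
	 */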
	#if defined(EA_PROCESSOR_ARM32)

		#define EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr) \
			{ \
				__int64 loadRet64; \
				EASTL_ARCH_ATOMIC_ARM32_LDREXD(loadRet64, EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(__int64, (ptr))); \
				\
				ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, loadRet64); \
			}

	#else

		#define EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr) \
			EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int64, 64, type, ret, ptr)

	#endif
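
	/**
	 * NOTE:
	 *
	 * The ARM documentation states the following:
	 * A 64-bit pair load requires the address to be quadword aligned and
	 * is single-copy atomic for each doubleword at doubleword granularity.
	 *
	 * Thus a store must succeed in order for the load to be observed as
	 * atomic, so we must use a full cmpxchg to perform a proper atomic
	 * 128-bit load.
	 */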
	#define EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, MemoryOrder) \
		{ \
			bool cmpxchgRetBool; \
			ret = *(ptr); \
			do \
			{ \
				EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_CMPXCHG_STRONG_, MemoryOrder), _128)(type, cmpxchgRetBool, \
																											 ptr, &(ret), ret); \
			} while (!cmpxchgRetBool); \
		}
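
	/**
	 * The loop above seeds ret with a non-atomic read and then
	 * compare-exchanges the observed value with itself: on failure the
	 * cmpxchg writes the actual current value back into ret, and on
	 * success the accompanying store guarantees the 128-bit value was
	 * observed atomically.
	 */

	/**
	 * The relaxed loads below map directly onto the plain single-copy
	 * atomic loads; no memory barrier is required.
	 */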
	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_8(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_16(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_32(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_64(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_128(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, RELAXED)
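
	/**
	 * The acquire loads below are implemented as the plain atomic load
	 * followed by EASTL_ATOMIC_CPU_MB(), a full hardware memory barrier
	 * (a dmb on ARM), which prevents later memory operations from being
	 * reordered before the load.
	 */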
	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_8(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_16(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_32(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_64(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_128(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, ACQUIRE)
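
	/**
	 * On this MSVC/ARM path the seq_cst loads take the same shape as the
	 * acquire loads: a plain atomic load followed by a full barrier.
	 * Sequential consistency additionally relies on the corresponding
	 * seq_cst store implementation emitting its own barriers.
	 */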
	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_8(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_16(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_32(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_64(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_128(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, SEQ_CST)


#endif


#endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_LOAD_H */