// arch_arm_load.h
// Copyright (c) Electronic Arts Inc. All rights reserved.


#ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_LOAD_H
#define EASTL_ATOMIC_INTERNAL_ARCH_ARM_LOAD_H

#if defined(EA_PRAGMA_ONCE_SUPPORTED)
	#pragma once
#endif

//
// void EASTL_ARCH_ATOMIC_LOAD_*_N(type, type ret, type * ptr)
//
#if defined(EA_COMPILER_MSVC)

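	/**
	 * NOTE:
	 *
	 * On ARM32, even 8-byte aligned 64-bit loads are not guaranteed to be
	 * single-copy atomic on every cpu; only cpus with the LPAE extension
	 * guarantee it. A ldrexd instruction is used instead so that no tearing
	 * can be observed on any ARM32 cpu.
	 */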
	#if defined(EA_PROCESSOR_ARM32)

		#define EASTL_ARCH_ATOMIC_ARM32_LDREXD(ret, ptr) \
			ret = __ldrexd((ptr))

	#endif

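	/**
	 * NOTE:
	 *
	 * The MSVC __iso_volatile_load{8,16,32,64} intrinsics perform a plain
	 * volatile load with no compiler-generated barriers, so the memory order
	 * variants further below add any required fences themselves.
	 */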
	#define EASTL_ARCH_ATOMIC_ARM_LOAD_N(integralType, bits, type, ret, ptr) \
		{ \
			integralType retIntegral; \
			retIntegral = EA_PREPROCESSOR_JOIN(__iso_volatile_load, bits)(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr))); \
			\
			ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \
		}


	#define EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int8, 8, type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int16, 16, type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int32, 32, type, ret, ptr)

	#if defined(EA_PROCESSOR_ARM32)

		#define EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr) \
			{ \
				__int64 loadRet64; \
				EASTL_ARCH_ATOMIC_ARM32_LDREXD(loadRet64, EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(__int64, (ptr))); \
				\
				ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, loadRet64); \
			}

	#else

		#define EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr) \
			EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int64, 64, type, ret, ptr)

	#endif

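	/**
	 * NOTE:
	 *
	 * The ARM architecture only guarantees single-copy atomicity of a 64-bit
	 * pair at doubleword granularity, so a plain 128-bit load may tear. A
	 * successful cmpxchg is required for a 128-bit value to be observed
	 * atomically, which is why the load below is implemented as a cmpxchg loop.
	 */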
	#define EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, MemoryOrder) \
		{ \
			bool cmpxchgRetBool; \
			ret = *(ptr); \
			do \
			{ \
				EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_CMPXCHG_STRONG_, MemoryOrder), _128)(type, cmpxchgRetBool, \
					ptr, &(ret), ret); \
			} while (!cmpxchgRetBool); \
		}

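	// Relaxed loads map directly to the plain volatile loads above; no fence is needed.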
	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_8(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_16(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_32(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_64(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr)

	#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_128(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, RELAXED)

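	// Acquire loads issue the plain load followed by a full barrier (EASTL_ATOMIC_CPU_MB),
	// which prevents later loads and stores from being reordered before the load.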
	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_8(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_16(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_32(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_64(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_128(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, ACQUIRE)

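	// Sequentially consistent loads use the same load-then-barrier sequence as acquire;
	// the matching seq_cst stores provide the additional fencing needed for a total order.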
	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_8(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_16(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_32(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_64(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr); \
		EASTL_ATOMIC_CPU_MB()

	#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_128(type, ret, ptr) \
		EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, SEQ_CST)

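	// For reference, a usage sketch with a hypothetical caller (not part of this
	// header): given a suitably aligned address pAtomicAddress,
	//
	//   uint32_t value;
	//   EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_32(uint32_t, value, pAtomicAddress);
	//
	// expands to a volatile 32-bit load of *pAtomicAddress into value, followed
	// by EASTL_ATOMIC_CPU_MB().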
#endif // defined(EA_COMPILER_MSVC)


#endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_LOAD_H */