/////////////////////////////////////////////////////////////////////////////////
// compiler_gcc.h
//
// Copyright (c) Electronic Arts Inc. All rights reserved.
/////////////////////////////////////////////////////////////////////////////////

6 #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_GCC_H
7 #define EASTL_ATOMIC_INTERNAL_COMPILER_GCC_H
8 
9 #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10  #pragma once
11 #endif
12 
13 
// Sanity checks: eastl::atomic<T> requires the gcc __atomic builtins to be
// always lock-free for the natively supported object sizes.
// __atomic_always_lock_free(size, 0) is a compile-time constant; a null
// second argument means "assume natural alignment for this size".
//
// NOTE(review): the 1/2/4-byte checks are compiled out with #if 0 —
// presumably because some supported target fails them; TODO confirm
// before re-enabling.
#if 0
static_assert(__atomic_always_lock_free(1, 0), "eastl::atomic<T> where sizeof(T) == 1 must be lock-free!");
static_assert(__atomic_always_lock_free(2, 0), "eastl::atomic<T> where sizeof(T) == 2 must be lock-free!");
static_assert(__atomic_always_lock_free(4, 0), "eastl::atomic<T> where sizeof(T) == 4 must be lock-free!");
#endif

// 8-byte atomics are only required to be lock-free on 64-bit platforms.
#if EA_PLATFORM_PTR_SIZE == 8
	static_assert(__atomic_always_lock_free(8, 0), "eastl::atomic<T> where sizeof(T) == 8 must be lock-free!");
#endif

// Advertise which operand widths this compiler backend provides atomic
// operations for. The generic atomic layer keys off these defines.
#define EASTL_COMPILER_ATOMIC_HAS_8BIT
#define EASTL_COMPILER_ATOMIC_HAS_16BIT
#define EASTL_COMPILER_ATOMIC_HAS_32BIT
#define EASTL_COMPILER_ATOMIC_HAS_64BIT

// 128-bit atomics (via __uint128_t) are only available on 64-bit targets.
#if EA_PLATFORM_PTR_SIZE == 8
	#define EASTL_COMPILER_ATOMIC_HAS_128BIT
#endif


// Integral types the backend uses to type-pun user objects of each width
// before handing them to the __atomic builtins.
#define EASTL_COMPILER_ATOMIC_FIXED_WIDTH_TYPE_8 uint8_t
#define EASTL_COMPILER_ATOMIC_FIXED_WIDTH_TYPE_16 uint16_t
#define EASTL_COMPILER_ATOMIC_FIXED_WIDTH_TYPE_32 uint32_t
#define EASTL_COMPILER_ATOMIC_FIXED_WIDTH_TYPE_64 uint64_t
// __uint128_t is a gcc/clang extension; only referenced when
// EASTL_COMPILER_ATOMIC_HAS_128BIT is defined.
#define EASTL_COMPILER_ATOMIC_FIXED_WIDTH_TYPE_128 __uint128_t


/**
 * EASTL_GCC_ATOMIC_FETCH_INTRIN_N
 *
 * Generic wrapper over the gcc __atomic fetch-op intrinsics
 * (__atomic_fetch_add, __atomic_fetch_and, ...). The value operand is
 * type-punned to an integral type of matching width, the intrinsic is
 * invoked on the object through a volatile integral pointer, and the
 * previous value it returns is punned back to the user's type into ret.
 */
#define EASTL_GCC_ATOMIC_FETCH_INTRIN_N(integralType, fetchIntrinsic, type, ret, ptr, val, gccMemoryOrder) \
	{ \
		integralType retIntegral; \
		integralType valIntegral = EASTL_ATOMIC_TYPE_PUN_CAST(integralType, (val)); \
		\
		retIntegral = fetchIntrinsic(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), valIntegral, gccMemoryOrder); \
		\
		ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \
	}

/**
 * EASTL_GCC_ATOMIC_CMPXCHG_INTRIN_N
 *
 * Wrapper over __atomic_compare_exchange. `expected` is a pointer to the
 * comparand (updated with the observed value on failure); `desired` is
 * taken by value, so its address is passed to the intrinsic. `ret`
 * receives the intrinsic's bool: true iff the exchange happened.
 */
#define EASTL_GCC_ATOMIC_CMPXCHG_INTRIN_N(integralType, type, ret, ptr, expected, desired, weak, successOrder, failOrder) \
	ret = __atomic_compare_exchange(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), \
									EASTL_ATOMIC_INTEGRAL_CAST(integralType, (expected)), \
									EASTL_ATOMIC_INTEGRAL_CAST(integralType, &(desired)), \
									weak, successOrder, failOrder)

/**
 * EASTL_GCC_ATOMIC_EXCHANGE_INTRIN_N
 *
 * Wrapper over __atomic_exchange (the pointer-based generic form):
 * atomically stores `val` into *ptr and returns the previous value in
 * `ret`, type-punning through an integral type of matching width.
 */
#define EASTL_GCC_ATOMIC_EXCHANGE_INTRIN_N(integralType, type, ret, ptr, val, gccMemoryOrder) \
	{ \
		integralType retIntegral; \
		integralType valIntegral = EASTL_ATOMIC_TYPE_PUN_CAST(integralType, (val)); \
		\
		__atomic_exchange(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), \
						  &valIntegral, &retIntegral, gccMemoryOrder); \
		\
		ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \
	}


125 #include "compiler_gcc_fetch_add.h"
126 #include "compiler_gcc_fetch_sub.h"
127 
128 #include "compiler_gcc_fetch_and.h"
129 #include "compiler_gcc_fetch_xor.h"
130 #include "compiler_gcc_fetch_or.h"
131 
132 #include "compiler_gcc_add_fetch.h"
133 #include "compiler_gcc_sub_fetch.h"
134 
135 #include "compiler_gcc_and_fetch.h"
136 #include "compiler_gcc_xor_fetch.h"
137 #include "compiler_gcc_or_fetch.h"
138 
139 #include "compiler_gcc_exchange.h"
140 
141 #include "compiler_gcc_cmpxchg_weak.h"
142 #include "compiler_gcc_cmpxchg_strong.h"
143 
144 #include "compiler_gcc_load.h"
145 #include "compiler_gcc_store.h"
146 
147 #include "compiler_gcc_barrier.h"
148 
149 #include "compiler_gcc_cpu_pause.h"
150 
151 #include "compiler_gcc_signal_fence.h"
152 
153 #include "compiler_gcc_thread_fence.h"
154 
155 
156 #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_GCC_H */