// Nugget / EASTL — compiler_gcc_load.h
1 // Copyright (c) Electronic Arts Inc. All rights reserved.
4 
5 
6 #ifndef EASTL_ATOMIC_INTERNAL_COMPILER_GCC_LOAD_H
7 #define EASTL_ATOMIC_INTERNAL_COMPILER_GCC_LOAD_H
8 
9 #if defined(EA_PRAGMA_ONCE_SUPPORTED)
10  #pragma once
11 #endif
12 
13 
/**
 * EASTL_GCC_ATOMIC_LOAD_N
 *
 * Core helper: atomically loads *(ptr) with the given __ATOMIC_* memory
 * order (via the GCC/Clang __atomic_load builtin) and assigns the result
 * to ret.
 *
 * The load is performed through an unsigned integral temporary of the
 * operand's width (integralType), and the loaded bits are then converted
 * back to the caller's type with EASTL_ATOMIC_TYPE_PUN_CAST. Routing the
 * load through a plain integral object lets __atomic_load operate on any
 * N-byte user type (struct, float, pointer, ...).
 *
 * NOTE(review): EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST and
 * EASTL_ATOMIC_TYPE_PUN_CAST are defined elsewhere in the atomic
 * implementation; they are presumed to perform well-defined
 * reinterpretation of the bits (avoiding strict-aliasing UB) — confirm
 * in their defining header.
 *
 * Expands to a braced compound statement, so the temporary retIntegral
 * is scoped to the expansion and cannot collide with caller locals.
 */
#define EASTL_GCC_ATOMIC_LOAD_N(integralType, type, ret, ptr, gccMemoryOrder) \
	{ \
		integralType retIntegral; \
		__atomic_load(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), &retIntegral, gccMemoryOrder); \
		\
		ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \
	}
21 
/* Width-specific wrappers: each selects the fixed-width unsigned integral
 * type matching the operand size (in bits) and forwards to
 * EASTL_GCC_ATOMIC_LOAD_N. */
#define EASTL_GCC_ATOMIC_LOAD_8(type, ret, ptr, gccMemoryOrder) \
	EASTL_GCC_ATOMIC_LOAD_N(uint8_t, type, ret, ptr, gccMemoryOrder)

#define EASTL_GCC_ATOMIC_LOAD_16(type, ret, ptr, gccMemoryOrder) \
	EASTL_GCC_ATOMIC_LOAD_N(uint16_t, type, ret, ptr, gccMemoryOrder)

#define EASTL_GCC_ATOMIC_LOAD_32(type, ret, ptr, gccMemoryOrder) \
	EASTL_GCC_ATOMIC_LOAD_N(uint32_t, type, ret, ptr, gccMemoryOrder)

#define EASTL_GCC_ATOMIC_LOAD_64(type, ret, ptr, gccMemoryOrder) \
	EASTL_GCC_ATOMIC_LOAD_N(uint64_t, type, ret, ptr, gccMemoryOrder)

/* 128-bit loads rely on the GCC/Clang __uint128_t extension; this macro is
 * only usable on targets where that type exists. */
#define EASTL_GCC_ATOMIC_LOAD_128(type, ret, ptr, gccMemoryOrder) \
	EASTL_GCC_ATOMIC_LOAD_N(__uint128_t, type, ret, ptr, gccMemoryOrder)
36 
37 
39 //
40 // void EASTL_COMPILER_ATOMIC_LOAD_*_N(type, type ret, type * ptr)
41 //
42 #define EASTL_COMPILER_ATOMIC_LOAD_RELAXED_8(type, ret, ptr) \
43  EASTL_GCC_ATOMIC_LOAD_8(type, ret, ptr, __ATOMIC_RELAXED)
44 
45 #define EASTL_COMPILER_ATOMIC_LOAD_RELAXED_16(type, ret, ptr) \
46  EASTL_GCC_ATOMIC_LOAD_16(type, ret, ptr, __ATOMIC_RELAXED)
47 
48 #define EASTL_COMPILER_ATOMIC_LOAD_RELAXED_32(type, ret, ptr) \
49  EASTL_GCC_ATOMIC_LOAD_32(type, ret, ptr, __ATOMIC_RELAXED)
50 
51 #define EASTL_COMPILER_ATOMIC_LOAD_RELAXED_64(type, ret, ptr) \
52  EASTL_GCC_ATOMIC_LOAD_64(type, ret, ptr, __ATOMIC_RELAXED)
53 
54 #define EASTL_COMPILER_ATOMIC_LOAD_RELAXED_128(type, ret, ptr) \
55  EASTL_GCC_ATOMIC_LOAD_128(type, ret, ptr, __ATOMIC_RELAXED)
56 
57 
/* Acquire loads: __ATOMIC_ACQUIRE prevents later memory operations from
 * being reordered before the load. */
#define EASTL_COMPILER_ATOMIC_LOAD_ACQUIRE_8(type, ret, ptr) \
	EASTL_GCC_ATOMIC_LOAD_8(type, ret, ptr, __ATOMIC_ACQUIRE)

#define EASTL_COMPILER_ATOMIC_LOAD_ACQUIRE_16(type, ret, ptr) \
	EASTL_GCC_ATOMIC_LOAD_16(type, ret, ptr, __ATOMIC_ACQUIRE)

#define EASTL_COMPILER_ATOMIC_LOAD_ACQUIRE_32(type, ret, ptr) \
	EASTL_GCC_ATOMIC_LOAD_32(type, ret, ptr, __ATOMIC_ACQUIRE)

#define EASTL_COMPILER_ATOMIC_LOAD_ACQUIRE_64(type, ret, ptr) \
	EASTL_GCC_ATOMIC_LOAD_64(type, ret, ptr, __ATOMIC_ACQUIRE)

#define EASTL_COMPILER_ATOMIC_LOAD_ACQUIRE_128(type, ret, ptr) \
	EASTL_GCC_ATOMIC_LOAD_128(type, ret, ptr, __ATOMIC_ACQUIRE)
72 
73 
/* Sequentially-consistent loads: __ATOMIC_SEQ_CST, the strongest ordering;
 * participates in the single total order of all seq_cst operations. */
#define EASTL_COMPILER_ATOMIC_LOAD_SEQ_CST_8(type, ret, ptr) \
	EASTL_GCC_ATOMIC_LOAD_8(type, ret, ptr, __ATOMIC_SEQ_CST)

#define EASTL_COMPILER_ATOMIC_LOAD_SEQ_CST_16(type, ret, ptr) \
	EASTL_GCC_ATOMIC_LOAD_16(type, ret, ptr, __ATOMIC_SEQ_CST)

#define EASTL_COMPILER_ATOMIC_LOAD_SEQ_CST_32(type, ret, ptr) \
	EASTL_GCC_ATOMIC_LOAD_32(type, ret, ptr, __ATOMIC_SEQ_CST)

#define EASTL_COMPILER_ATOMIC_LOAD_SEQ_CST_64(type, ret, ptr) \
	EASTL_GCC_ATOMIC_LOAD_64(type, ret, ptr, __ATOMIC_SEQ_CST)

#define EASTL_COMPILER_ATOMIC_LOAD_SEQ_CST_128(type, ret, ptr) \
	EASTL_GCC_ATOMIC_LOAD_128(type, ret, ptr, __ATOMIC_SEQ_CST)
88 
89 
90 #endif /* EASTL_ATOMIC_INTERNAL_COMPILER_GCC_LOAD_H */