Nugget
compiler_msvc_xor_fetch.h
// Copyright (c) Electronic Arts Inc. All rights reserved.
/////////////////////////////////////////////////////////////////////////////////


#ifndef EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_XOR_FETCH_H
#define EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_XOR_FETCH_H

#if defined(EA_PRAGMA_ONCE_SUPPORTED)
	#pragma once
#endif


// Select the MSVC interlocked-xor intrinsic for each operand width.
// On x86-64 the "_np" ("no prefix") variants are used; per Microsoft's
// intrinsics documentation these perform the operation without emitting
// the LOCK prefix.
// NOTE(review): the _np forms provide no ordering on their own — memory
// ordering is presumably supplied by the EASTL_MSVC_ATOMIC_OP_FETCH_N
// machinery this header delegates to; confirm against the base header.
#if defined(EA_PROCESSOR_X86_64)

	#define EASTL_MSVC_ATOMIC_XOR_FETCH_INTRIN_8 _InterlockedXor8_np
	#define EASTL_MSVC_ATOMIC_XOR_FETCH_INTRIN_16 _InterlockedXor16_np
	#define EASTL_MSVC_ATOMIC_XOR_FETCH_INTRIN_32 _InterlockedXor_np
	#define EASTL_MSVC_ATOMIC_XOR_FETCH_INTRIN_64 _InterlockedXor64_np

#else

	#define EASTL_MSVC_ATOMIC_XOR_FETCH_INTRIN_8 _InterlockedXor8
	#define EASTL_MSVC_ATOMIC_XOR_FETCH_INTRIN_16 _InterlockedXor16
	#define EASTL_MSVC_ATOMIC_XOR_FETCH_INTRIN_32 _InterlockedXor
	#define EASTL_MSVC_ATOMIC_XOR_FETCH_INTRIN_64 _InterlockedXor64

#endif


// The interlocked intrinsics return the value the object held BEFORE the
// operation, while xor_fetch must yield the value AFTER it.  This
// post-intrin compute re-derives the new value by re-applying the xor.
// NOTE(review): argument roles (which of val/xorend is the operand vs. the
// intrinsic's return) are fixed by EASTL_MSVC_ATOMIC_OP_FETCH_N, defined
// elsewhere — the expression is symmetric, so either assignment of roles
// produces the same result.
#define EASTL_MSVC_XOR_FETCH_POST_INTRIN_COMPUTE(ret, val, xorend) \
	ret = (val) ^ (xorend)

// Width-generic expansion: delegate to the shared OP_FETCH_N driver with a
// no-op pre-compute and the xor post-compute above.
#define EASTL_MSVC_ATOMIC_XOR_FETCH_N(integralType, xorIntrinsic, type, ret, ptr, val, MemoryOrder) \
	EASTL_MSVC_ATOMIC_OP_FETCH_N(integralType, xorIntrinsic, type, ret, ptr, val, MemoryOrder, \
								 EASTL_MSVC_NOP_PRE_INTRIN_COMPUTE, EASTL_MSVC_XOR_FETCH_POST_INTRIN_COMPUTE)


// Per-width helpers: bind each bit width to the integral type the matching
// MSVC intrinsic operates on (char / short / long / __int64).
#define EASTL_MSVC_ATOMIC_XOR_FETCH_8(type, ret, ptr, val, MemoryOrder) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_N(char, EASTL_MSVC_ATOMIC_XOR_FETCH_INTRIN_8, type, ret, ptr, val, MemoryOrder)

#define EASTL_MSVC_ATOMIC_XOR_FETCH_16(type, ret, ptr, val, MemoryOrder) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_N(short, EASTL_MSVC_ATOMIC_XOR_FETCH_INTRIN_16, type, ret, ptr, val, MemoryOrder)

#define EASTL_MSVC_ATOMIC_XOR_FETCH_32(type, ret, ptr, val, MemoryOrder) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_N(long, EASTL_MSVC_ATOMIC_XOR_FETCH_INTRIN_32, type, ret, ptr, val, MemoryOrder)

#define EASTL_MSVC_ATOMIC_XOR_FETCH_64(type, ret, ptr, val, MemoryOrder) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_N(__int64, EASTL_MSVC_ATOMIC_XOR_FETCH_INTRIN_64, type, ret, ptr, val, MemoryOrder)


/////////////////////////////////////////////////////////////////////////////////
//
// void EASTL_COMPILER_ATOMIC_XOR_FETCH_*_N(type, type ret, type * ptr, type val)
//
// Public compiler-layer entry points: one macro per (memory order, width)
// pair, each forwarding to the per-width helper with the order tag.
//
#define EASTL_COMPILER_ATOMIC_XOR_FETCH_RELAXED_8(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_8(type, ret, ptr, val, RELAXED)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_RELAXED_16(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_16(type, ret, ptr, val, RELAXED)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_RELAXED_32(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_32(type, ret, ptr, val, RELAXED)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_RELAXED_64(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_64(type, ret, ptr, val, RELAXED)


#define EASTL_COMPILER_ATOMIC_XOR_FETCH_ACQUIRE_8(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_8(type, ret, ptr, val, ACQUIRE)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_ACQUIRE_16(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_16(type, ret, ptr, val, ACQUIRE)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_ACQUIRE_32(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_32(type, ret, ptr, val, ACQUIRE)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_ACQUIRE_64(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_64(type, ret, ptr, val, ACQUIRE)


#define EASTL_COMPILER_ATOMIC_XOR_FETCH_RELEASE_8(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_8(type, ret, ptr, val, RELEASE)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_RELEASE_16(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_16(type, ret, ptr, val, RELEASE)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_RELEASE_32(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_32(type, ret, ptr, val, RELEASE)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_RELEASE_64(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_64(type, ret, ptr, val, RELEASE)


#define EASTL_COMPILER_ATOMIC_XOR_FETCH_ACQ_REL_8(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_8(type, ret, ptr, val, ACQ_REL)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_ACQ_REL_16(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_16(type, ret, ptr, val, ACQ_REL)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_ACQ_REL_32(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_32(type, ret, ptr, val, ACQ_REL)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_ACQ_REL_64(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_64(type, ret, ptr, val, ACQ_REL)


#define EASTL_COMPILER_ATOMIC_XOR_FETCH_SEQ_CST_8(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_8(type, ret, ptr, val, SEQ_CST)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_SEQ_CST_16(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_16(type, ret, ptr, val, SEQ_CST)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_SEQ_CST_32(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_32(type, ret, ptr, val, SEQ_CST)

#define EASTL_COMPILER_ATOMIC_XOR_FETCH_SEQ_CST_64(type, ret, ptr, val) \
	EASTL_MSVC_ATOMIC_XOR_FETCH_64(type, ret, ptr, val, SEQ_CST)


#endif /* EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_XOR_FETCH_H */