// Copyright (c) Electronic Arts Inc. All rights reserved.


#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_XOR_H
#define EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_XOR_H

#if defined(EA_PRAGMA_ONCE_SUPPORTED)
	#pragma once
#endif

/////////////////////////////////////////////////////////////////////////////////
//
// void EASTL_ARCH_ATOMIC_FETCH_XOR_*_N(type, type ret, type * ptr, type val)
//
#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86)

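	// Hook consumed by the compare-exchange retry loop in
	// EASTL_ARCH_ATOMIC_X86_OP_64_IMPL: given the value last observed at the
	// target address, it computes the desired value to install (observed ^ val).
	// The NOP post-compute hook passed below leaves the observed (old) value as
	// the return value, matching fetch_xor semantics.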
	#define EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED(ret, observed, val) \
		ret = ((observed) ^ (val))


	#define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_64(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \
										 EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
										 EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_64(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \
										 EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
										 EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_64(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \
										 EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
										 EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_64(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \
										 EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
										 EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_64(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \
										 EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
										 EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)


#endif
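
// Illustrative sketch, not part of EASTL: 32-bit x86 has no 64-bit atomic XOR
// instruction, so the macros above are built on a CMPXCHG8B-style retry loop
// supplied by EASTL_ARCH_ATOMIC_X86_OP_64_IMPL. A hand-written equivalent of
// the RELAXED variant, using the MSVC intrinsic, would look roughly like this
// (FetchXorRelaxed64 is a hypothetical name used only for this sketch):
//
//     inline __int64 FetchXorRelaxed64(__int64 volatile* ptr, __int64 val)
//     {
//         __int64 observed = *ptr; // seed; a torn or stale read just costs one retry
//         for (;;)
//         {
//             const __int64 desired = observed ^ val; // ..._PRE_COMPUTE_DESIRED
//             const __int64 prev = _InterlockedCompareExchange64(ptr, desired, observed);
//             if (prev == observed)
//                 return observed; // fetch_xor returns the old value (..._NOP_POST_COMPUTE_RET)
//             observed = prev;     // another thread raced ahead; retry with the fresh value
//         }
//     }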


#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64))

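	// Intentionally identical to the MSVC/x86 hook above; the two #if blocks are
	// mutually exclusive, so at most one definition is visible in any given build.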
	#define EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED(ret, observed, val) \
		ret = ((observed) ^ (val))


	#define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_128(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \
										  EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
										  EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_128(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \
										  EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
										  EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_128(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \
										  EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
										  EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_128(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \
										  EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
										  EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_128(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \
										  EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \
										  EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET)


#endif
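
// Illustrative sketch, not part of EASTL: x86-64 has no 128-bit atomic XOR, so
// the macros above reuse the same retry-loop shape around a 16-byte compare-and-
// swap (CMPXCHG16B, which GCC/Clang emit for the __sync builtins when building
// with -mcx16). FetchXor128 is a hypothetical name used only for this sketch:
//
//     inline __uint128_t FetchXor128(volatile __uint128_t* ptr, __uint128_t val)
//     {
//         // Seed with a no-op CAS so the initial 16-byte read is itself atomic.
//         __uint128_t observed = __sync_val_compare_and_swap(ptr, 0, 0);
//         for (;;)
//         {
//             const __uint128_t desired = observed ^ val; // ..._PRE_COMPUTE_DESIRED
//             const __uint128_t prev = __sync_val_compare_and_swap(ptr, observed, desired);
//             if (prev == observed)
//                 return observed; // old value (..._NOP_POST_COMPUTE_RET)
//             observed = prev;
//         }
//     }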


#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_XOR_H */