arch_x86_sub_fetch.h
// Copyright (c) Electronic Arts Inc. All rights reserved.


#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_SUB_FETCH_H
#define EASTL_ATOMIC_INTERNAL_ARCH_X86_SUB_FETCH_H

#if defined(EA_PRAGMA_ONCE_SUPPORTED)
	#pragma once
#endif


/////////////////////////////////////////////////////////////////////////////////
//
// void EASTL_ARCH_ATOMIC_SUB_FETCH_*_N(type, type ret, type * ptr, type val)
//
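// The *_PRE_COMPUTE_DESIRED / *_POST_COMPUTE_RET hooks defined below are consumed
// by the EASTL_ARCH_ATOMIC_X86_OP_64_IMPL / EASTL_ARCH_ATOMIC_X86_OP_128_IMPL
// helpers (defined alongside this header), which wrap a compare-exchange retry
// loop. Purely as an illustrative sketch (the locals `observed` and `desired` are
// hypothetical names, not the actual implementation), the shape of that loop is:
//
//   type observed = *ptr;                               // relaxed initial load
//   type desired;
//   do {
//       PRE_COMPUTE_DESIRED(desired, observed, val);    // desired = observed - val
//   } while (!cmpxchg_strong(ptr, &observed, desired)); // observed refreshed on failure
//   POST_COMPUTE_RET(ret, observed, val);               // ret = observed - val
//
// Because sub_fetch returns the value *after* the subtraction (unlike fetch_sub,
// which returns the prior value), the post-compute step subtracts again from the
// last observed pre-operation value.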
#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86)


	#define EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED(ret, observed, val) \
		ret = ((observed) - (val))

	#define EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET(ret, prevObserved, val) \
		ret = ((prevObserved) - (val))


	#define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_64(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \
										 EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \
										 EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_64(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \
										 EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \
										 EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_64(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \
										 EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \
										 EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_64(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \
										 EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \
										 EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_64(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \
										 EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \
										 EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET)


#endif
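
// Illustrative usage only (kept as a comment; these macros are normally expanded
// by EASTL's higher-level atomic dispatch rather than invoked directly). A hedged
// sketch of the 64-bit sequentially consistent variant, with hypothetical names
// `counter` and `newValue`:
//
//   uint64_t counter = 10;
//   uint64_t newValue;
//   EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_64(uint64_t, newValue, &counter, uint64_t(4));
//   // newValue == 6 and counter == 6: the post-subtraction value is returned.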


#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64))


	#define EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED(ret, observed, val) \
		ret = ((observed) - (val))

	#define EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET(ret, prevObserved, val) \
		ret = ((prevObserved) - (val))


	#define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_128(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \
										  EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \
										  EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_128(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \
										  EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \
										  EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_128(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \
										  EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \
										  EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_128(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \
										  EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \
										  EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET)

	#define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_128(type, ret, ptr, val) \
		EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \
										  EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \
										  EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET)


#endif
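
// Illustrative usage only (kept as a comment). On x86-64 with clang/GCC the
// 128-bit variants are expected to lower to a double-width compare-exchange
// (cmpxchg16b) retry loop inside EASTL_ARCH_ATOMIC_X86_OP_128_IMPL. A hedged
// sketch, assuming __uint128_t as the operand type purely for illustration:
//
//   __uint128_t total = 100;
//   __uint128_t remaining;
//   EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_128(__uint128_t, remaining, &total, __uint128_t(25));
//   // remaining == 75; total == 75.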


#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_SUB_FETCH_H */