Diffstat (limited to 'include/EASTL/internal/atomic/compiler/msvc/compiler_msvc_fetch_sub.h')
-rw-r--r--  include/EASTL/internal/atomic/compiler/msvc/compiler_msvc_fetch_sub.h  104
1 file changed, 104 insertions(+), 0 deletions(-)
diff --git a/include/EASTL/internal/atomic/compiler/msvc/compiler_msvc_fetch_sub.h b/include/EASTL/internal/atomic/compiler/msvc/compiler_msvc_fetch_sub.h
new file mode 100644
index 0000000..6d5d9e3
--- /dev/null
+++ b/include/EASTL/internal/atomic/compiler/msvc/compiler_msvc_fetch_sub.h
@@ -0,0 +1,104 @@
+/////////////////////////////////////////////////////////////////////////////////
+// Copyright (c) Electronic Arts Inc. All rights reserved.
+/////////////////////////////////////////////////////////////////////////////////
+
+
+#ifndef EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_FETCH_SUB_H
+#define EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_FETCH_SUB_H
+
+#if defined(EA_PRAGMA_ONCE_SUPPORTED)
+ #pragma once
+#endif
+
+
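+// MSVC has no dedicated "fetch subtract" intrinsic, so fetch_sub is expressed as a
+// fetch_add of the negated operand. The PRE_INTRIN_COMPUTE hook below negates the
+// value before it is handed to the _InterlockedExchangeAdd* family of intrinsics.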
+#define EASTL_MSVC_FETCH_SUB_PRE_INTRIN_COMPUTE(ret, val) \
+ ret = EASTL_ATOMIC_NEGATE_OPERAND((val))
+
+#define EASTL_MSVC_ATOMIC_FETCH_SUB_N(integralType, subIntrinsic, type, ret, ptr, val, MemoryOrder) \
+ EASTL_MSVC_ATOMIC_FETCH_OP_N(integralType, subIntrinsic, type, ret, ptr, val, MemoryOrder, \
+ EASTL_MSVC_FETCH_SUB_PRE_INTRIN_COMPUTE)
+
+
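+// Width-specific wrappers: the 8-, 16- and 64-bit operations map to _InterlockedExchangeAdd8,
+// _InterlockedExchangeAdd16 and _InterlockedExchangeAdd64, while the unsuffixed
+// _InterlockedExchangeAdd handles the 32-bit (long) case. The MemoryOrder tag is forwarded
+// to EASTL_MSVC_ATOMIC_FETCH_OP_N, which is expected to select the matching
+// acquire/release/relaxed intrinsic variant on targets (e.g. ARM) that provide one.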
+#define EASTL_MSVC_ATOMIC_FETCH_SUB_8(type, ret, ptr, val, MemoryOrder) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_N(char, _InterlockedExchangeAdd8, type, ret, ptr, val, MemoryOrder)
+
+#define EASTL_MSVC_ATOMIC_FETCH_SUB_16(type, ret, ptr, val, MemoryOrder) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_N(short, _InterlockedExchangeAdd16, type, ret, ptr, val, MemoryOrder)
+
+#define EASTL_MSVC_ATOMIC_FETCH_SUB_32(type, ret, ptr, val, MemoryOrder) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_N(long, _InterlockedExchangeAdd, type, ret, ptr, val, MemoryOrder)
+
+#define EASTL_MSVC_ATOMIC_FETCH_SUB_64(type, ret, ptr, val, MemoryOrder) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_N(__int64, _InterlockedExchangeAdd64, type, ret, ptr, val, MemoryOrder)
+
+
+/////////////////////////////////////////////////////////////////////////////////
+//
+// void EASTL_COMPILER_ATOMIC_FETCH_SUB_*_N(type, type ret, type * ptr, type val)
+//
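+// The wrappers below instantiate each width (8/16/32/64 bits) for every memory order
+// exposed by the compiler layer: RELAXED, ACQUIRE, RELEASE, ACQ_REL and SEQ_CST.
+//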
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_RELAXED_8(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_8(type, ret, ptr, val, RELAXED)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_RELAXED_16(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_16(type, ret, ptr, val, RELAXED)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_RELAXED_32(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_32(type, ret, ptr, val, RELAXED)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_RELAXED_64(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_64(type, ret, ptr, val, RELAXED)
+
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_ACQUIRE_8(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_8(type, ret, ptr, val, ACQUIRE)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_ACQUIRE_16(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_16(type, ret, ptr, val, ACQUIRE)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_ACQUIRE_32(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_32(type, ret, ptr, val, ACQUIRE)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_ACQUIRE_64(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_64(type, ret, ptr, val, ACQUIRE)
+
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_RELEASE_8(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_8(type, ret, ptr, val, RELEASE)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_RELEASE_16(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_16(type, ret, ptr, val, RELEASE)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_RELEASE_32(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_32(type, ret, ptr, val, RELEASE)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_RELEASE_64(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_64(type, ret, ptr, val, RELEASE)
+
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_ACQ_REL_8(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_8(type, ret, ptr, val, ACQ_REL)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_ACQ_REL_16(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_16(type, ret, ptr, val, ACQ_REL)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_ACQ_REL_32(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_32(type, ret, ptr, val, ACQ_REL)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_ACQ_REL_64(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_64(type, ret, ptr, val, ACQ_REL)
+
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_SEQ_CST_8(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_8(type, ret, ptr, val, SEQ_CST)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_SEQ_CST_16(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_16(type, ret, ptr, val, SEQ_CST)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_SEQ_CST_32(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_32(type, ret, ptr, val, SEQ_CST)
+
+#define EASTL_COMPILER_ATOMIC_FETCH_SUB_SEQ_CST_64(type, ret, ptr, val) \
+ EASTL_MSVC_ATOMIC_FETCH_SUB_64(type, ret, ptr, val, SEQ_CST)
+
+
+#endif /* EASTL_ATOMIC_INTERNAL_COMPILER_MSVC_FETCH_SUB_H */
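
For reference, a minimal sketch of what the 32-bit SEQ_CST path reduces to on a plain
x64 MSVC build (illustrative only: the real expansion goes through
EASTL_MSVC_ATOMIC_FETCH_OP_N and EASTL_ATOMIC_NEGATE_OPERAND, and the function name
below is hypothetical):

    #include <intrin.h>

    // fetch_sub(ptr, val) == fetch_add(ptr, -val); MSVC only provides the add intrinsic.
    // The negation goes through unsigned arithmetic so the most negative value stays
    // well-defined (presumably what EASTL_ATOMIC_NEGATE_OPERAND takes care of).
    // Returns the value held at *ptr immediately before the subtraction.
    inline long FetchSub32SeqCst(long volatile* ptr, long val)
    {
        return _InterlockedExchangeAdd(ptr, static_cast<long>(0UL - static_cast<unsigned long>(val)));
    }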