Diffstat (limited to 'include/EASTL/internal/atomic/arch/arm')
-rw-r--r--   include/EASTL/internal/atomic/arch/arm/arch_arm.h                  89
-rw-r--r--   include/EASTL/internal/atomic/arch/arm/arch_arm_load.h            156
-rw-r--r--   include/EASTL/internal/atomic/arch/arm/arch_arm_memory_barrier.h   97
-rw-r--r--   include/EASTL/internal/atomic/arch/arm/arch_arm_store.h           142
-rw-r--r--   include/EASTL/internal/atomic/arch/arm/arch_arm_thread_fence.h     37
5 files changed, 0 insertions, 521 deletions
diff --git a/include/EASTL/internal/atomic/arch/arm/arch_arm.h b/include/EASTL/internal/atomic/arch/arm/arch_arm.h
deleted file mode 100644
index cc2ce52..0000000
--- a/include/EASTL/internal/atomic/arch/arm/arch_arm.h
+++ /dev/null
@@ -1,89 +0,0 @@
-/////////////////////////////////////////////////////////////////////////////////
-// Copyright (c) Electronic Arts Inc. All rights reserved.
-/////////////////////////////////////////////////////////////////////////////////
-
-
-#ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_H
-#define EASTL_ATOMIC_INTERNAL_ARCH_ARM_H
-
-#if defined(EA_PRAGMA_ONCE_SUPPORTED)
- #pragma once
-#endif
-
-
-/**
- * NOTE: We use this mapping
- *
- * ARMv7 Mapping 'trailing sync;':
- *
- * Load Relaxed : ldr
- * Load Acquire : ldr; dmb ish
- * Load Seq_Cst : ldr; dmb ish
- *
- * Store Relaxed : str
- * Store Release : dmb ish; str
- * Store Seq_Cst : dmb ish; str; dmb ish
- *
- * Relaxed Fence :
- * Acquire Fence : dmb ish
- * Release Fence : dmb ish
- * Acq_Rel Fence : dmb ish
- * Seq_Cst Fence : dmb ish
- */
-
-/**
- * ARMv7 Mapping 'leading sync;':
- *
- * Load Relaxed : ldr
- * Load Acquire : ldr; dmb ish
- * Load Seq_Cst : dmb ish; ldr; dmb ish
- *
- * Store Relaxed : str
- * Store Release : dmb ish; str
- * Store Seq_Cst : dmb ish; str
- *
- * Relaxed Fence :
- * Acquire Fence : dmb ish
- * Release Fence : dmb ish
- * Acq_Rel Fence : dmb ish
- * Seq_Cst Fence : dmb ish
- */
-
-/**
- * NOTE:
- *
- * On ARM32/64, we use the 'trailing sync;' convention with the stricter load acquire that uses
- * a dmb instead of a control dependency + isb; one reason is to ensure the IRIW litmus test
- * is satisfied. See EASTL/atomic.h for further explanation and a deep-dive.
- *
- * For ARMv8 we could move to the proper store-release and load-acquire instructions, the RCsc
- * variant. All ARMv7 approaches still work on ARMv8, and this code path is only used by MSVC,
- * which sees little use here; most ARM code ends up going through clang or gcc since Microsoft
- * ARM devices aren't that abundant.
- */
-
-
-/////////////////////////////////////////////////////////////////////////////////
-
-
-#if defined(EA_COMPILER_MSVC)
-
- #if EA_PLATFORM_PTR_SIZE == 8
- #define EASTL_ARCH_ATOMIC_HAS_128BIT
- #endif
-
-#endif
-
-
-/////////////////////////////////////////////////////////////////////////////////
-
-
-#include "arch_arm_load.h"
-#include "arch_arm_store.h"
-
-#include "arch_arm_memory_barrier.h"
-
-#include "arch_arm_thread_fence.h"
-
-
-#endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_H */
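
The mapping tables in the header above translate almost line-for-line into inline
assembly on gcc/clang. A minimal sketch of the 'trailing sync;' convention for a
32-bit load-acquire and store-seq_cst (hypothetical free functions for illustration;
the real implementation is the macro layer being deleted here):

	#include <cstdint>

	static inline void arm_dmb_ish()
	{
		__asm__ __volatile__("dmb ish" ::: "memory"); // hardware barrier, also a compiler barrier
	}

	static inline uint32_t load_acquire_32(const volatile uint32_t* ptr)
	{
		uint32_t ret = *ptr; // ldr
		arm_dmb_ish();       // trailing dmb ish upgrades the plain load to an acquire
		return ret;
	}

	static inline void store_seq_cst_32(volatile uint32_t* ptr, uint32_t val)
	{
		arm_dmb_ish(); // order all prior accesses before the store
		*ptr = val;    // str
		arm_dmb_ish(); // trailing dmb ish gives sequential consistency
	}

Note how seq_cst stores pay for both a leading and a trailing barrier under this
convention, while seq_cst loads reuse the acquire sequence.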
diff --git a/include/EASTL/internal/atomic/arch/arm/arch_arm_load.h b/include/EASTL/internal/atomic/arch/arm/arch_arm_load.h
deleted file mode 100644
index e3b79b8..0000000
--- a/include/EASTL/internal/atomic/arch/arm/arch_arm_load.h
+++ /dev/null
@@ -1,156 +0,0 @@
-/////////////////////////////////////////////////////////////////////////////////
-// Copyright (c) Electronic Arts Inc. All rights reserved.
-/////////////////////////////////////////////////////////////////////////////////
-
-
-#ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_LOAD_H
-#define EASTL_ATOMIC_INTERNAL_ARCH_ARM_LOAD_H
-
-#if defined(EA_PRAGMA_ONCE_SUPPORTED)
- #pragma once
-#endif
-
-
-/////////////////////////////////////////////////////////////////////////////////
-//
-// void EASTL_ARCH_ATOMIC_LOAD_*_N(type, type ret, type * ptr)
-//
-#if defined(EA_COMPILER_MSVC)
-
-
- /**
- * NOTE:
- *
-	 * Even 8-byte aligned 64-bit memory accesses on ARM32 are not
-	 * guaranteed to be atomic on all ARM32 cpus; they are only
-	 * guaranteed on cpus with the LPAE extension. We need to use an
-	 * ldrexd instruction in order to ensure no shearing is observed
-	 * on all ARM32 processors.
- */
- #if defined(EA_PROCESSOR_ARM32)
-
- #define EASTL_ARCH_ATOMIC_ARM32_LDREXD(ret, ptr) \
- ret = __ldrexd((ptr))
-
- #endif
-
-
- #define EASTL_ARCH_ATOMIC_ARM_LOAD_N(integralType, bits, type, ret, ptr) \
- { \
- integralType retIntegral; \
- retIntegral = EA_PREPROCESSOR_JOIN(__iso_volatile_load, bits)(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr))); \
- \
- ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \
- }
-
-
- #define EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int8, 8, type, ret, ptr)
-
- #define EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int16, 16, type, ret, ptr)
-
- #define EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int32, 32, type, ret, ptr)
-
-
- #if defined(EA_PROCESSOR_ARM32)
-
-
-		#define EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr) \
- { \
- __int64 loadRet64; \
- EASTL_ARCH_ATOMIC_ARM32_LDREXD(loadRet64, EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(__int64, (ptr))); \
- \
- ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, loadRet64); \
- }
-
- #else
-
- #define EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int64, 64, type, ret, ptr)
-
- #endif
-
-
- /**
- * NOTE:
- *
- * The ARM documentation states the following:
- * A 64-bit pair requires the address to be quadword aligned and is single-copy atomic for each doubleword at doubleword granularity
- *
-	 * Thus we must ensure the store succeeds in order for the load to be observed as atomic,
-	 * which is why we must use the full cmpxchg to perform a proper atomic load.
- */
- #define EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, MemoryOrder) \
- { \
- bool cmpxchgRetBool; \
- ret = *(ptr); \
- do \
- { \
- EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_CMPXCHG_STRONG_, MemoryOrder), _128)(type, cmpxchgRetBool, \
- ptr, &(ret), ret); \
- } while (!cmpxchgRetBool); \
- }
-
-
- #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_8(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr)
-
- #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_16(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr)
-
- #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_32(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr)
-
- #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_64(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr)
-
- #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_128(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, RELAXED)
-
-
- #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_8(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr); \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_16(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr); \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_32(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr); \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_64(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr); \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_128(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, ACQUIRE)
-
-
- #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_8(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr); \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_16(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr); \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_32(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr); \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_64(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr); \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_128(type, ret, ptr) \
- EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, SEQ_CST)
-
-
-#endif
-
-
-#endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_LOAD_H */
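
The 128-bit path above is the interesting one: since a 16-byte load is only
single-copy atomic per doubleword, the load is implemented as a compare-exchange
loop that keeps writing back the value it read until the CAS succeeds. Roughly
the same shape in portable C++ (a sketch using std::atomic rather than the EASTL
macro layer; Value128 is a made-up type):

	#include <atomic>

	struct alignas(16) Value128 { long long lo; long long hi; };

	Value128 atomic_load_128(std::atomic<Value128>& obj)
	{
		Value128 expected{}; // arbitrary initial guess
		// CAS with desired == expected: a failed attempt refreshes 'expected'
		// with the current contents; a successful attempt stores back the very
		// value it read, so 'expected' holds an atomically observed snapshot.
		while (!obj.compare_exchange_strong(expected, expected)) {}
		return expected;
	}

The store performed by the successful CAS is exactly what the NOTE calls out:
the load is only observable as atomic because the store succeeded.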
diff --git a/include/EASTL/internal/atomic/arch/arm/arch_arm_memory_barrier.h b/include/EASTL/internal/atomic/arch/arm/arch_arm_memory_barrier.h
deleted file mode 100644
index 44dc991..0000000
--- a/include/EASTL/internal/atomic/arch/arm/arch_arm_memory_barrier.h
+++ /dev/null
@@ -1,97 +0,0 @@
-/////////////////////////////////////////////////////////////////////////////////
-// Copyright (c) Electronic Arts Inc. All rights reserved.
-/////////////////////////////////////////////////////////////////////////////////
-
-
-#ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_MEMORY_BARRIER_H
-#define EASTL_ATOMIC_INTERNAL_ARCH_ARM_MEMORY_BARRIER_H
-
-#if defined(EA_PRAGMA_ONCE_SUPPORTED)
- #pragma once
-#endif
-
-
-#if defined(EA_COMPILER_MSVC) && !defined(EA_COMPILER_CLANG_CL)
-
- #if defined(EA_PROCESSOR_ARM32)
-
- #define EASTL_ARM_DMB_ISH _ARM_BARRIER_ISH
-
- #define EASTL_ARM_DMB_ISHST _ARM_BARRIER_ISHST
-
- #define EASTL_ARM_DMB_ISHLD _ARM_BARRIER_ISH
-
- #elif defined(EA_PROCESSOR_ARM64)
-
- #define EASTL_ARM_DMB_ISH _ARM64_BARRIER_ISH
-
- #define EASTL_ARM_DMB_ISHST _ARM64_BARRIER_ISHST
-
- #define EASTL_ARM_DMB_ISHLD _ARM64_BARRIER_ISHLD
-
- #endif
-
-
- /**
- * NOTE:
- *
-	 * While it makes no sense for a hardware memory barrier not to imply a compiler barrier,
-	 * the MSVC docs do not explicitly state that it does, so better to be safe than sorry
-	 * chasing down hard-to-find bugs due to the compiler deciding to reorder things.
- */
-
- #define EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(option) \
- EASTL_ATOMIC_COMPILER_BARRIER(); \
- __dmb(option); \
- EASTL_ATOMIC_COMPILER_BARRIER()
-
-
-#elif defined(EA_COMPILER_GNUC) || defined(__clang__)
-
- #define EASTL_ARM_DMB_ISH ish
-
- #define EASTL_ARM_DMB_ISHST ishst
-
- #if defined(EA_PROCESSOR_ARM32)
-
- #define EASTL_ARM_DMB_ISHLD ish
-
- #elif defined(EA_PROCESSOR_ARM64)
-
- #define EASTL_ARM_DMB_ISHLD ishld
-
- #endif
-
-
- #define EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(option) \
- __asm__ __volatile__ ("dmb " EA_STRINGIFY(option) ::: "memory")
-
-
-#endif
-
-
-/////////////////////////////////////////////////////////////////////////////////
-//
-// void EASTL_ARCH_ATOMIC_CPU_MB()
-//
-#define EASTL_ARCH_ATOMIC_CPU_MB() \
- EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(EASTL_ARM_DMB_ISH)
-
-
-/////////////////////////////////////////////////////////////////////////////////
-//
-// void EASTL_ARCH_ATOMIC_CPU_WMB()
-//
-#define EASTL_ARCH_ATOMIC_CPU_WMB() \
- EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(EASTL_ARM_DMB_ISHST)
-
-
-/////////////////////////////////////////////////////////////////////////////////
-//
-// void EASTL_ARCH_ATOMIC_CPU_RMB()
-//
-#define EASTL_ARCH_ATOMIC_CPU_RMB() \
- EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(EASTL_ARM_DMB_ISHLD)
-
-
-#endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_MEMORY_BARRIER_H */
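
To see where the read and write barriers above are meant to sit, here is a small
message-passing sketch (hypothetical globals; assumes the EASTL_ATOMIC_CPU_*
wrappers used by the other headers are in scope, and leans on volatile to keep
the compiler from folding the accesses):

	volatile int g_payload = 0;
	volatile int g_flag    = 0;

	void producer()
	{
		g_payload = 42;
		EASTL_ATOMIC_CPU_WMB(); // dmb ishst: payload store ordered before flag store
		g_flag = 1;
	}

	void consumer()
	{
		if (g_flag == 1)
		{
			EASTL_ATOMIC_CPU_RMB(); // dmb ishld (plain dmb ish on ARM32): flag load ordered before payload load
			int v = g_payload;      // observes 42 under this ordering
			(void)v;
		}
	}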
diff --git a/include/EASTL/internal/atomic/arch/arm/arch_arm_store.h b/include/EASTL/internal/atomic/arch/arm/arch_arm_store.h
deleted file mode 100644
index ab53b9d..0000000
--- a/include/EASTL/internal/atomic/arch/arm/arch_arm_store.h
+++ /dev/null
@@ -1,142 +0,0 @@
-/////////////////////////////////////////////////////////////////////////////////
-// Copyright (c) Electronic Arts Inc. All rights reserved.
-/////////////////////////////////////////////////////////////////////////////////
-
-
-#ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_STORE_H
-#define EASTL_ATOMIC_INTERNAL_ARCH_ARM_STORE_H
-
-#if defined(EA_PRAGMA_ONCE_SUPPORTED)
- #pragma once
-#endif
-
-
-/////////////////////////////////////////////////////////////////////////////////
-//
-// void EASTL_ARCH_ATOMIC_STORE_*_N(type, type * ptr, type val)
-//
-#if defined(EA_COMPILER_MSVC)
-
-
- #define EASTL_ARCH_ATOMIC_ARM_STORE_N(integralType, bits, type, ptr, val) \
- EA_PREPROCESSOR_JOIN(__iso_volatile_store, bits)(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), EASTL_ATOMIC_TYPE_PUN_CAST(integralType, (val)))
-
-
- #define EASTL_ARCH_ATOMIC_ARM_STORE_8(type, ptr, val) \
- EASTL_ARCH_ATOMIC_ARM_STORE_N(__int8, 8, type, ptr, val)
-
- #define EASTL_ARCH_ATOMIC_ARM_STORE_16(type, ptr, val) \
- EASTL_ARCH_ATOMIC_ARM_STORE_N(__int16, 16, type, ptr, val)
-
- #define EASTL_ARCH_ATOMIC_ARM_STORE_32(type, ptr, val) \
- EASTL_ARCH_ATOMIC_ARM_STORE_N(__int32, 32, type, ptr, val)
-
-
- #if defined(EA_PROCESSOR_ARM64)
-
- #define EASTL_ARCH_ATOMIC_ARM_STORE_64(type, ptr, val) \
- EASTL_ARCH_ATOMIC_ARM_STORE_N(__int64, 64, type, ptr, val)
-
- #endif
-
-
- #define EASTL_ARCH_ATOMIC_ARM_STORE_128(type, ptr, val, MemoryOrder) \
- { \
- type exchange128; EA_UNUSED(exchange128); \
- EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_EXCHANGE_, MemoryOrder), _128)(type, exchange128, ptr, val); \
- }
-
-
- #define EASTL_ARCH_ATOMIC_STORE_RELAXED_8(type, ptr, val) \
- EASTL_ARCH_ATOMIC_ARM_STORE_8(type, ptr, val)
-
- #define EASTL_ARCH_ATOMIC_STORE_RELAXED_16(type, ptr, val) \
- EASTL_ARCH_ATOMIC_ARM_STORE_16(type, ptr, val)
-
- #define EASTL_ARCH_ATOMIC_STORE_RELAXED_32(type, ptr, val) \
- EASTL_ARCH_ATOMIC_ARM_STORE_32(type, ptr, val)
-
- #define EASTL_ARCH_ATOMIC_STORE_RELAXED_128(type, ptr, val) \
- EASTL_ARCH_ATOMIC_ARM_STORE_128(type, ptr, val, RELAXED)
-
-
- #define EASTL_ARCH_ATOMIC_STORE_RELEASE_8(type, ptr, val) \
- EASTL_ATOMIC_CPU_MB(); \
- EASTL_ARCH_ATOMIC_ARM_STORE_8(type, ptr, val)
-
- #define EASTL_ARCH_ATOMIC_STORE_RELEASE_16(type, ptr, val) \
- EASTL_ATOMIC_CPU_MB(); \
- EASTL_ARCH_ATOMIC_ARM_STORE_16(type, ptr, val)
-
- #define EASTL_ARCH_ATOMIC_STORE_RELEASE_32(type, ptr, val) \
- EASTL_ATOMIC_CPU_MB(); \
- EASTL_ARCH_ATOMIC_ARM_STORE_32(type, ptr, val)
-
- #define EASTL_ARCH_ATOMIC_STORE_RELEASE_128(type, ptr, val) \
- EASTL_ARCH_ATOMIC_ARM_STORE_128(type, ptr, val, RELEASE)
-
-
- #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_8(type, ptr, val) \
- EASTL_ATOMIC_CPU_MB(); \
-		EASTL_ARCH_ATOMIC_ARM_STORE_8(type, ptr, val); \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_16(type, ptr, val) \
- EASTL_ATOMIC_CPU_MB(); \
- EASTL_ARCH_ATOMIC_ARM_STORE_16(type, ptr, val); \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_32(type, ptr, val) \
- EASTL_ATOMIC_CPU_MB(); \
- EASTL_ARCH_ATOMIC_ARM_STORE_32(type, ptr, val); \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128(type, ptr, val) \
- EASTL_ARCH_ATOMIC_ARM_STORE_128(type, ptr, val, SEQ_CST)
-
-
- #if defined(EA_PROCESSOR_ARM32)
-
-
- #define EASTL_ARCH_ATOMIC_STORE_RELAXED_64(type, ptr, val) \
- { \
- type retExchange64; EA_UNUSED(retExchange64); \
- EASTL_ATOMIC_EXCHANGE_RELAXED_64(type, retExchange64, ptr, val); \
- }
-
- #define EASTL_ARCH_ATOMIC_STORE_RELEASE_64(type, ptr, val) \
- { \
- type retExchange64; EA_UNUSED(retExchange64); \
- EASTL_ATOMIC_EXCHANGE_RELEASE_64(type, retExchange64, ptr, val); \
- }
-
- #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64(type, ptr, val) \
- { \
- type retExchange64; EA_UNUSED(retExchange64); \
- EASTL_ATOMIC_EXCHANGE_SEQ_CST_64(type, retExchange64, ptr, val); \
- }
-
-
- #elif defined(EA_PROCESSOR_ARM64)
-
-
- #define EASTL_ARCH_ATOMIC_STORE_RELAXED_64(type, ptr, val) \
- EASTL_ARCH_ATOMIC_ARM_STORE_64(type, ptr, val)
-
- #define EASTL_ARCH_ATOMIC_STORE_RELEASE_64(type, ptr, val) \
- EASTL_ATOMIC_CPU_MB(); \
- EASTL_ARCH_ATOMIC_ARM_STORE_64(type, ptr, val)
-
- #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64(type, ptr, val) \
- EASTL_ATOMIC_CPU_MB(); \
- EASTL_ARCH_ATOMIC_ARM_STORE_64(type, ptr, val); \
- EASTL_ATOMIC_CPU_MB()
-
-
- #endif
-
-
-#endif
-
-
-#endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_STORE_H */
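
The ARM32 64-bit stores above are routed through an atomic exchange whose result
is discarded, because a plain 64-bit str is not single-copy atomic on all ARM32
cores (the same LPAE caveat noted in the load header). A rough portable sketch
of that idea:

	#include <atomic>
	#include <cstdint>

	void store_relaxed_64(std::atomic<uint64_t>& obj, uint64_t val)
	{
		// The exchange result is ignored; on ARM32 this compiles down to an
		// ldrexd/strexd loop, which is what guarantees a non-sheared store.
		(void)obj.exchange(val, std::memory_order_relaxed);
	}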
diff --git a/include/EASTL/internal/atomic/arch/arm/arch_arm_thread_fence.h b/include/EASTL/internal/atomic/arch/arm/arch_arm_thread_fence.h
deleted file mode 100644
index 391c64e..0000000
--- a/include/EASTL/internal/atomic/arch/arm/arch_arm_thread_fence.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/////////////////////////////////////////////////////////////////////////////////
-// Copyright (c) Electronic Arts Inc. All rights reserved.
-/////////////////////////////////////////////////////////////////////////////////
-
-
-#ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_THREAD_FENCE_H
-#define EASTL_ATOMIC_INTERNAL_ARCH_ARM_THREAD_FENCE_H
-
-#if defined(EA_PRAGMA_ONCE_SUPPORTED)
- #pragma once
-#endif
-
-
-/////////////////////////////////////////////////////////////////////////////////
-//
-// void EASTL_ARCH_ATOMIC_THREAD_FENCE_*()
-//
-#if defined(EA_COMPILER_MSVC)
-
- #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELAXED()
-
- #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQUIRE() \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELEASE() \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQ_REL() \
- EASTL_ATOMIC_CPU_MB()
-
- #define EASTL_ARCH_ATOMIC_THREAD_FENCE_SEQ_CST() \
- EASTL_ATOMIC_CPU_MB()
-
-#endif
-
-
-#endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_THREAD_FENCE_H */
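
For reference, the MSVC fence mapping above matches what gcc/clang typically emit
for std::atomic_thread_fence on ARMv7: nothing for relaxed, and a single dmb ish
for every other order.

	#include <atomic>

	void fence_examples()
	{
		std::atomic_thread_fence(std::memory_order_relaxed); // no instruction emitted
		std::atomic_thread_fence(std::memory_order_acquire); // dmb ish
		std::atomic_thread_fence(std::memory_order_release); // dmb ish
		std::atomic_thread_fence(std::memory_order_acq_rel); // dmb ish
		std::atomic_thread_fence(std::memory_order_seq_cst); // dmb ish
	}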