Diffstat (limited to 'include/EASTL/internal/atomic/arch')
44 files changed, 0 insertions, 5529 deletions
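All of the headers removed below share one capability-detection pattern, so a brief orientation may help when scanning the diff. The sketch is a minimal excerpt in the style of the deleted files: the EASTL_ARCH_ATOMIC_* names are taken verbatim from the diff, the choice of FETCH_ADD/RELAXED/32 is only illustrative, and the higher-level dispatch that consumes the *_AVAILABLE flags is not shown here (presumably it falls back to a compiler backend when no arch-specific macro is defined).

// Illustrative excerpt: if the architecture header (x86/arch_x86.h or
// arm/arch_arm.h) defined an implementation macro for this operation,
// memory order, and width, the matching *_AVAILABLE flag becomes 1,
// otherwise 0.
#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_32)
	#define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_32_AVAILABLE 1
#else
	#define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_32_AVAILABLE 0
#endif

Each deleted arch_*.h repeats this block for every memory order (RELAXED, ACQUIRE, RELEASE, ACQ_REL, SEQ_CST) and operand width (8, 16, 32, 64, 128 bits); the cmpxchg headers additionally define single-order convenience macros in terms of the two-order variants (for example, ..._ACQ_REL_N forwards to ..._ACQ_REL_ACQUIRE_N).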
diff --git a/include/EASTL/internal/atomic/arch/arch.h b/include/EASTL/internal/atomic/arch/arch.h
deleted file mode 100644
index 4924a59..0000000
--- a/include/EASTL/internal/atomic/arch/arch.h
+++ /dev/null
@@ -1,65 +0,0 @@
-/////////////////////////////////////////////////////////////////////////////////
-// Copyright (c) Electronic Arts Inc. All rights reserved.
-/////////////////////////////////////////////////////////////////////////////////
-
-
-#ifndef EASTL_ATOMIC_INTERNAL_ARCH_H
-#define EASTL_ATOMIC_INTERNAL_ARCH_H
-
-#if defined(EA_PRAGMA_ONCE_SUPPORTED)
-	#pragma once
-#endif
-
-
-/////////////////////////////////////////////////////////////////////////////////
-//
-// Include the architecture specific implementations
-//
-#if defined(EA_PROCESSOR_X86) || defined(EA_PROCESSOR_X86_64)
-
-	#include "x86/arch_x86.h"
-
-#elif defined(EA_PROCESSOR_ARM32) || defined(EA_PROCESSOR_ARM64)
-
-	#include "arm/arch_arm.h"
-
-#endif
-
-
-/////////////////////////////////////////////////////////////////////////////////
-
-
-#include "arch_fetch_add.h"
-#include "arch_fetch_sub.h"
-
-#include "arch_fetch_and.h"
-#include "arch_fetch_xor.h"
-#include "arch_fetch_or.h"
-
-#include "arch_add_fetch.h"
-#include "arch_sub_fetch.h"
-
-#include "arch_and_fetch.h"
-#include "arch_xor_fetch.h"
-#include "arch_or_fetch.h"
-
-#include "arch_exchange.h"
-
-#include "arch_cmpxchg_weak.h"
-#include "arch_cmpxchg_strong.h"
-
-#include "arch_load.h"
-#include "arch_store.h"
-
-#include "arch_compiler_barrier.h"
-
-#include "arch_cpu_pause.h"
-
-#include "arch_memory_barrier.h"
-
-#include "arch_signal_fence.h"
-
-#include "arch_thread_fence.h"
-
-
-#endif /* EASTL_ATOMIC_INTERNAL_ARCH_H */
diff --git a/include/EASTL/internal/atomic/arch/arch_add_fetch.h b/include/EASTL/internal/atomic/arch/arch_add_fetch.h
deleted file mode 100644
index 65771f8..0000000
--- a/include/EASTL/internal/atomic/arch/arch_add_fetch.h
+++ /dev/null
@@ -1,173 +0,0 @@
-/////////////////////////////////////////////////////////////////////////////////
-// Copyright (c) Electronic Arts Inc. All rights reserved.
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_ADD_FETCH_H -#define EASTL_ATOMIC_INTERNAL_ARCH_ADD_FETCH_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_ADD_FETCH_*_N(type, type ret, type * ptr, type val) -// -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_8) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_8) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_8) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_16) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_16) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_16) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_32) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_32) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_32) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_64) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_64) - #define 
EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_64) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_64) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_128) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_128) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_128) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_128_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_ADD_FETCH_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_and_fetch.h b/include/EASTL/internal/atomic/arch/arch_and_fetch.h deleted file mode 100644 index df7ba35..0000000 --- a/include/EASTL/internal/atomic/arch/arch_and_fetch.h +++ /dev/null @@ -1,173 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_AND_FETCH_H -#define EASTL_ATOMIC_INTERNAL_ARCH_AND_FETCH_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_AND_FETCH_*_N(type, type ret, type * ptr, type val) -// -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_8) - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_8) - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_8) - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_16) - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_16) - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_16) - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_32) - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_32) - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_32) - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_64) - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_64) - #define 
EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_64) - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_64) - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_128) - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_128) - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_128) - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_128_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_AND_FETCH_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_cmpxchg_strong.h b/include/EASTL/internal/atomic/arch/arch_cmpxchg_strong.h deleted file mode 100644 index 1005dc3..0000000 --- a/include/EASTL/internal/atomic/arch/arch_cmpxchg_strong.h +++ /dev/null @@ -1,430 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_CMPXCHG_STRONG_H -#define EASTL_ATOMIC_INTERNAL_ARCH_CMPXCHG_STRONG_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_*_*_N(type, bool ret, type * ptr, type * expected, type desired) -// -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_16) - #define 
EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_64_AVAILABLE 1 -#else - #define 
EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_128_AVAILABLE 0 
-#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_128_AVAILABLE 0 -#endif - - -///////////////////////////////////////////////////////////////////////////////// - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_*_N(type, bool ret, type * ptr, type * expected, type desired) -// -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_8_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_8_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_8(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_8(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_8_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_8_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_8(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_8(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_8_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_8_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_8(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_8(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_8_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_8_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_8(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_8(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_8_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_8_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_8(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_8(type, ret, ptr, expected, desired) - - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_16_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_16_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_16(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_16(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_16_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_16_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_16(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_16(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_16_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_16_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_16(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_16(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_16_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_16_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_16(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_16(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_16_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_16_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_16(type, ret, ptr, expected, desired) \ - 
EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_16(type, ret, ptr, expected, desired) - - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_32_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_32_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_32(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_32(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_32_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_32_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_32(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_32(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_32_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_32_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_32(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_32(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_32_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_32_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_32(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_32(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_32_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_32_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_32(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_32(type, ret, ptr, expected, desired) - - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_64_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_64_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_64(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_64(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_64_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_64_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_64(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_64(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_64_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_64_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_64(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_64(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_64_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_64_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_64(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_64(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_64_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_64_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_64(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_64(type, ret, ptr, expected, desired) - - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_128_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_128_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_128(type, ret, ptr, expected, desired) - -#define 
EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_128_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_128_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_128(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_128_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_128_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_128(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_128_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_128_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_128(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_128_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_128_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_128(type, ret, ptr, expected, desired) - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_CMPXCHG_STRONG_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_cmpxchg_weak.h b/include/EASTL/internal/atomic/arch/arch_cmpxchg_weak.h deleted file mode 100644 index 5ce2638..0000000 --- a/include/EASTL/internal/atomic/arch/arch_cmpxchg_weak.h +++ /dev/null @@ -1,430 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_CMPXCHG_WEAK_H -#define EASTL_ATOMIC_INTERNAL_ARCH_CMPXCHG_WEAK_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_*_*_N(type, bool ret, type * ptr, type * expected, type desired) -// -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_8_AVAILABLE 0 -#endif - -#if 
defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_32_AVAILABLE 1 -#else - #define 
EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_128) - #define 
EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_128_AVAILABLE 0 -#endif - - -///////////////////////////////////////////////////////////////////////////////// - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_*_N(type, bool ret, type * ptr, type * expected, type desired) -// -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_8_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_8_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_8(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_8(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_8_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_8_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_8(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_8(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_8_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_8_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_8(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_8(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_8_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_8_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_8(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_8(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_8_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_8_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_8(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_8(type, ret, ptr, expected, desired) - - -#define 
EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_16_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_16_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_16(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_16(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_16_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_16_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_16(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_16(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_16_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_16_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_16(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_16(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_16_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_16_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_16(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_16(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_16_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_16_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_16(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_16(type, ret, ptr, expected, desired) - - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_32_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_32_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_32(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_32(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_32_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_32_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_32(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_32(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_32_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_32_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_32(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_32(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_32_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_32_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_32(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_32(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_32_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_32_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_32(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_32(type, ret, ptr, expected, desired) - - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_64_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_64_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_64(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_64(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_64_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_64_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_64(type, ret, ptr, expected, desired) \ - 
EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_64(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_64_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_64_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_64(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_64(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_64_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_64_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_64(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_64(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_64_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_64_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_64(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_64(type, ret, ptr, expected, desired) - - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_128_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_128_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_128(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_128_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_128_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_128(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_128_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_128_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_128(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_128_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_128_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_128(type, ret, ptr, expected, desired) - -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_128_AVAILABLE \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_128_AVAILABLE -#define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_128(type, ret, ptr, expected, desired) - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_CMPXCHG_WEAK_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_compiler_barrier.h b/include/EASTL/internal/atomic/arch/arch_compiler_barrier.h deleted file mode 100644 index 0652469..0000000 --- a/include/EASTL/internal/atomic/arch/arch_compiler_barrier.h +++ /dev/null @@ -1,19 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_COMPILER_BARRIER_H -#define EASTL_ATOMIC_INTERNAL_ARCH_COMPILER_BARRIER_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -#define EASTL_ARCH_ATOMIC_COMPILER_BARRIER_AVAILABLE 0 - -#define EASTL_ARCH_ATOMIC_COMPILER_BARRIER_DATA_DEPENDENCY_AVAILABLE 0 - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_COMPILER_BARRIER_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_cpu_pause.h b/include/EASTL/internal/atomic/arch/arch_cpu_pause.h deleted file mode 100644 index e8c2d1d..0000000 --- a/include/EASTL/internal/atomic/arch/arch_cpu_pause.h +++ /dev/null @@ -1,25 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// copyright (c) electronic arts inc. all rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_CPU_PAUSE_H -#define EASTL_ATOMIC_INTERNAL_ARCH_CPU_PAUSE_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CPU_PAUSE() -// -#if defined(EASTL_ARCH_ATOMIC_CPU_PAUSE) - #define EASTL_ARCH_ATOMIC_CPU_PAUSE_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CPU_PAUSE_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_CPU_PAUSE_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_exchange.h b/include/EASTL/internal/atomic/arch/arch_exchange.h deleted file mode 100644 index 7600318..0000000 --- a/include/EASTL/internal/atomic/arch/arch_exchange.h +++ /dev/null @@ -1,173 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_EXCHANGE_H -#define EASTL_ATOMIC_INTERNAL_ARCH_EXCHANGE_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_EXCHANGE_*_N(type, type ret, type * ptr, type val) -// -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_8) - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_8) - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_8) - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_16) - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_16) - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_16) - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_32) - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_32) - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_32) - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_64) - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_64) - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_64_AVAILABLE 1 -#else - 
#define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_64) - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_64) - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_128) - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_128) - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_128) - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_128_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_EXCHANGE_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_fetch_add.h b/include/EASTL/internal/atomic/arch/arch_fetch_add.h deleted file mode 100644 index 71907f7..0000000 --- a/include/EASTL/internal/atomic/arch/arch_fetch_add.h +++ /dev/null @@ -1,173 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_FETCH_ADD_H -#define EASTL_ATOMIC_INTERNAL_ARCH_FETCH_ADD_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_FETCH_ADD_*_N(type, type ret, type * ptr, type val) -// -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_8) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_8) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_8) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_16) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_16) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_16) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_32) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_32) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_32) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_64) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_64) - #define 
EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_64) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_64) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_128) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_128) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_128) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_128_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_FETCH_ADD_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_fetch_and.h b/include/EASTL/internal/atomic/arch/arch_fetch_and.h deleted file mode 100644 index f2b39a4..0000000 --- a/include/EASTL/internal/atomic/arch/arch_fetch_and.h +++ /dev/null @@ -1,173 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_FETCH_AND_H -#define EASTL_ATOMIC_INTERNAL_ARCH_FETCH_AND_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_FETCH_AND_*_N(type, type ret, type * ptr, type val) -// -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_8) - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_8) - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_8) - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_16) - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_16) - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_16) - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_32) - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_32) - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_32) - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_64) - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_64) - #define 
EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_64) - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_64) - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_128) - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_128) - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_128) - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_128_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_FETCH_AND_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_fetch_or.h b/include/EASTL/internal/atomic/arch/arch_fetch_or.h deleted file mode 100644 index dd6dd0d..0000000 --- a/include/EASTL/internal/atomic/arch/arch_fetch_or.h +++ /dev/null @@ -1,173 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_FETCH_OR_H -#define EASTL_ATOMIC_INTERNAL_ARCH_FETCH_OR_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_FETCH_OR_*_N(type, type ret, type * ptr, type val) -// -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_8) - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_8) - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_8) - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_16) - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_16) - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_16) - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_32) - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_32) - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_32) - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_64) - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_64) - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_64_AVAILABLE 1 -#else - 
#define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_64) - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_64) - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_128) - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_128) - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_128) - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_128_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_FETCH_OR_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_fetch_sub.h b/include/EASTL/internal/atomic/arch/arch_fetch_sub.h deleted file mode 100644 index ea63db7..0000000 --- a/include/EASTL/internal/atomic/arch/arch_fetch_sub.h +++ /dev/null @@ -1,173 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_FETCH_SUB_H -#define EASTL_ATOMIC_INTERNAL_ARCH_FETCH_SUB_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_FETCH_SUB_*_N(type, type ret, type * ptr, type val) -// -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_8) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_8) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_8) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_16) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_16) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_16) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_32) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_32) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_32) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_64) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_64) - #define 
EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_64) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_64) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_128) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_128) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_128) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_128_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_FETCH_SUB_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_fetch_xor.h b/include/EASTL/internal/atomic/arch/arch_fetch_xor.h deleted file mode 100644 index b41ad2d..0000000 --- a/include/EASTL/internal/atomic/arch/arch_fetch_xor.h +++ /dev/null @@ -1,173 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_FETCH_XOR_H -#define EASTL_ATOMIC_INTERNAL_ARCH_FETCH_XOR_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_FETCH_XOR_*_N(type, type ret, type * ptr, type val) -// -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_8) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_8) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_8) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_16) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_16) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_16) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_32) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_32) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_32) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_64) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_64) - #define 
EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_64) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_64) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_128) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_128) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_128) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_128_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_FETCH_XOR_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_load.h b/include/EASTL/internal/atomic/arch/arch_load.h deleted file mode 100644 index eea7cf4..0000000 --- a/include/EASTL/internal/atomic/arch/arch_load.h +++ /dev/null @@ -1,125 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_LOAD_H -#define EASTL_ATOMIC_INTERNAL_ARCH_LOAD_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_LOAD_*_N(type, type ret, type * ptr) -// -#if defined(EASTL_ARCH_ATOMIC_LOAD_RELAXED_8) - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_LOAD_RELAXED_16) - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_LOAD_RELAXED_32) - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_LOAD_READ_DEPENDS_32) - #define EASTL_ARCH_ATOMIC_LOAD_READ_DEPENDS_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_READ_DEPENDS_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_LOAD_RELAXED_64) - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_64) - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_LOAD_READ_DEPENDS_64) - #define EASTL_ARCH_ATOMIC_LOAD_READ_DEPENDS_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_READ_DEPENDS_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_LOAD_RELAXED_128) - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_128_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_LOAD_H */ diff --git 
a/include/EASTL/internal/atomic/arch/arch_memory_barrier.h b/include/EASTL/internal/atomic/arch/arch_memory_barrier.h deleted file mode 100644 index c6cc6bf..0000000 --- a/include/EASTL/internal/atomic/arch/arch_memory_barrier.h +++ /dev/null @@ -1,47 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_MEMORY_BARRIER_H -#define EASTL_ATOMIC_INTERNAL_ARCH_MEMORY_BARRIER_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CPU_MB() -// -#if defined(EASTL_ARCH_ATOMIC_CPU_MB) - #define EASTL_ARCH_ATOMIC_CPU_MB_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CPU_MB_AVAILABLE 0 -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CPU_WMB() -// -#if defined(EASTL_ARCH_ATOMIC_CPU_WMB) - #define EASTL_ARCH_ATOMIC_CPU_WMB_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CPU_WMB_AVAILABLE 0 -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CPU_RMB() -// -#if defined(EASTL_ARCH_ATOMIC_CPU_RMB) - #define EASTL_ARCH_ATOMIC_CPU_RMB_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_CPU_RMB_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_MEMORY_BARRIER_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_or_fetch.h b/include/EASTL/internal/atomic/arch/arch_or_fetch.h deleted file mode 100644 index 110326b..0000000 --- a/include/EASTL/internal/atomic/arch/arch_or_fetch.h +++ /dev/null @@ -1,173 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_OR_FETCH_H -#define EASTL_ATOMIC_INTERNAL_ARCH_OR_FETCH_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_OR_FETCH_*_N(type, type ret, type * ptr, type val) -// -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_8) - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_8) - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_8) - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_16) - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_16) - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_16) - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_32) - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_32) - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_32) - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_64) - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_64) - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_64_AVAILABLE 1 -#else - 
#define EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_64) - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_64) - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_128) - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_128) - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_128) - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_128_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_OR_FETCH_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_signal_fence.h b/include/EASTL/internal/atomic/arch/arch_signal_fence.h deleted file mode 100644 index 65b64fc..0000000 --- a/include/EASTL/internal/atomic/arch/arch_signal_fence.h +++ /dev/null @@ -1,21 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_SIGNAL_FENCE_H -#define EASTL_ATOMIC_INTERNAL_ARCH_SIGNAL_FENCE_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -#define EASTL_ARCH_ATOMIC_SIGNAL_FENCE_RELAXED_AVAILABLE 0 -#define EASTL_ARCH_ATOMIC_SIGNAL_FENCE_ACQUIRE_AVAILABLE 0 -#define EASTL_ARCH_ATOMIC_SIGNAL_FENCE_RELEASE_AVAILABLE 0 -#define EASTL_ARCH_ATOMIC_SIGNAL_FENCE_ACQ_REL_AVAILABLE 0 -#define EASTL_ARCH_ATOMIC_SIGNAL_FENCE_SEQ_CST_AVAILABLE 0 - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_SIGNAL_FENCE_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_store.h b/include/EASTL/internal/atomic/arch/arch_store.h deleted file mode 100644 index 9a4112c..0000000 --- a/include/EASTL/internal/atomic/arch/arch_store.h +++ /dev/null @@ -1,113 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_STORE_H -#define EASTL_ATOMIC_INTERNAL_ARCH_STORE_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_STORE_*_N(type, type * ptr, type val) -// -#if defined(EASTL_ARCH_ATOMIC_STORE_RELAXED_8) - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_STORE_RELEASE_8) - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_STORE_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_STORE_RELAXED_16) - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_STORE_RELEASE_16) - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_STORE_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_STORE_RELAXED_32) - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_STORE_RELEASE_32) - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_STORE_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_STORE_RELAXED_64) - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_STORE_RELEASE_64) - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_STORE_RELAXED_128) - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_STORE_RELEASE_128) - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_STORE_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_sub_fetch.h b/include/EASTL/internal/atomic/arch/arch_sub_fetch.h deleted file mode 100644 index 20241b1..0000000 --- a/include/EASTL/internal/atomic/arch/arch_sub_fetch.h +++ /dev/null @@ -1,173 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. 
All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_SUB_FETCH_H -#define EASTL_ATOMIC_INTERNAL_ARCH_SUB_FETCH_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_SUB_FETCH_*_N(type, type ret, type * ptr, type val) -// -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_8) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_8) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_8) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_16) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_16) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_16) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_32) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_32) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_32) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_64) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_64_AVAILABLE 0 -#endif - -#if 
defined(EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_64) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_64) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_64) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_128) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_128) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_128) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_128_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_SUB_FETCH_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_thread_fence.h b/include/EASTL/internal/atomic/arch/arch_thread_fence.h deleted file mode 100644 index 676fbf1..0000000 --- a/include/EASTL/internal/atomic/arch/arch_thread_fence.h +++ /dev/null @@ -1,49 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_THREAD_FENCE_H -#define EASTL_ATOMIC_INTERNAL_ARCH_THREAD_FENCE_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_THREAD_FENCE_*() -// -#if defined(EASTL_ARCH_ATOMIC_THREAD_FENCE_RELAXED) - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELAXED_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELAXED_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQUIRE) - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQUIRE_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQUIRE_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_THREAD_FENCE_RELEASE) - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELEASE_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELEASE_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQ_REL) - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQ_REL_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQ_REL_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_THREAD_FENCE_SEQ_CST) - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_SEQ_CST_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_SEQ_CST_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_THREAD_FENCE_H */ diff --git a/include/EASTL/internal/atomic/arch/arch_xor_fetch.h b/include/EASTL/internal/atomic/arch/arch_xor_fetch.h deleted file mode 100644 index 63548c2..0000000 --- a/include/EASTL/internal/atomic/arch/arch_xor_fetch.h +++ /dev/null @@ -1,173 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_XOR_FETCH_H -#define EASTL_ATOMIC_INTERNAL_ARCH_XOR_FETCH_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_XOR_FETCH_*_N(type, type ret, type * ptr, type val) -// -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_8) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_8) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_8) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_8) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_8_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_8) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_8_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_8_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_16) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_16) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_16) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_16) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_16_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_16) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_16_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_16_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_32) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_32) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_32) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_32) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_32_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_32) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_32_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_32_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_64) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_64) - #define 
EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_64) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_64) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_64_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_64) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_64_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_64_AVAILABLE 0 -#endif - - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_128) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_128) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_128) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_128) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_128_AVAILABLE 0 -#endif - -#if defined(EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_128) - #define EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_128_AVAILABLE 1 -#else - #define EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_128_AVAILABLE 0 -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_XOR_FETCH_H */ diff --git a/include/EASTL/internal/atomic/arch/arm/arch_arm.h b/include/EASTL/internal/atomic/arch/arm/arch_arm.h deleted file mode 100644 index cc2ce52..0000000 --- a/include/EASTL/internal/atomic/arch/arm/arch_arm.h +++ /dev/null @@ -1,89 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_H -#define EASTL_ATOMIC_INTERNAL_ARCH_ARM_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -/** - * NOTE: We use this mapping - * - * ARMv7 Mapping 'trailing sync;': - * - * Load Relaxed : ldr - * Load Acquire : ldr; dmb ish - * Load Seq_Cst : ldr; dmb ish - * - * Store Relaxed : str - * Store Release : dmb ish; str - * Store Seq_Cst : dmb ish; str; dmb ish - * - * Relaxed Fence : - * Acquire Fence : dmb ish - * Release Fence : dmb ish - * Acq_Rel Fence : dmb ish - * Seq_Cst Fence : dmb ish - */ - -/** - * ARMv7 Mapping 'leading sync;': - * - * Load Relaxed : ldr - * Load Acquire : ldr; dmb ish - * Load Seq_Cst : dmb ish; ldr; dmb ish - * - * Store Relaxed : str - * Store Release : dmb ish; str - * Store Seq_Cst : dmb ish; str - * - * Relaxed Fence : - * Acquire Fence : dmb ish - * Release Fence : dmb ish - * Acq_Rel Fence : dmb ish - * Seq_Cst Fence : dmb ish - */ - -/** - * NOTE: - * - * On ARM32/64, we use the 'trailing sync;' convention with the stricter load acquire that uses - * a dmb instead of a control dependency + isb; one reason is to ensure the IRIW litmus test is satisfied. - * See EASTL/atomic.h for further explanation and a deep-dive. - * - * For ARMv8 we could move to use the new proper store release and load acquire, RCsc variant.
- * All ARMv7 approaches work on ARMv8, and this code path is only used on MSVC, which isn't used - * heavily. Most of the ARM code will end up going through clang or gcc since Microsoft ARM devices - * aren't that abundant. - */ - - -///////////////////////////////////////////////////////////////////////////////// - - -#if defined(EA_COMPILER_MSVC) - - #if EA_PLATFORM_PTR_SIZE == 8 - #define EASTL_ARCH_ATOMIC_HAS_128BIT - #endif - -#endif - - -///////////////////////////////////////////////////////////////////////////////// - - -#include "arch_arm_load.h" -#include "arch_arm_store.h" - -#include "arch_arm_memory_barrier.h" - -#include "arch_arm_thread_fence.h" - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_H */ diff --git a/include/EASTL/internal/atomic/arch/arm/arch_arm_load.h b/include/EASTL/internal/atomic/arch/arm/arch_arm_load.h deleted file mode 100644 index e3b79b8..0000000 --- a/include/EASTL/internal/atomic/arch/arm/arch_arm_load.h +++ /dev/null @@ -1,156 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_LOAD_H -#define EASTL_ATOMIC_INTERNAL_ARCH_ARM_LOAD_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_LOAD_*_N(type, type ret, type * ptr) -// -#if defined(EA_COMPILER_MSVC) - - - /** - * NOTE: - * - * Even 8-byte aligned 64-bit memory accesses on ARM32 are not - * guaranteed to be atomic on all ARM32 cpus; they are only guaranteed - * on cpus with the LPAE extension. We need to use an - * ldrexd instruction in order to ensure no shearing is observed - * for all ARM32 processors. - */ - #if defined(EA_PROCESSOR_ARM32) - - #define EASTL_ARCH_ATOMIC_ARM32_LDREXD(ret, ptr) \ - ret = __ldrexd((ptr)) - - #endif - - - #define EASTL_ARCH_ATOMIC_ARM_LOAD_N(integralType, bits, type, ret, ptr) \ - { \ - integralType retIntegral; \ - retIntegral = EA_PREPROCESSOR_JOIN(__iso_volatile_load, bits)(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr))); \ - \ - ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \ - } - - - #define EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int8, 8, type, ret, ptr) - - #define EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int16, 16, type, ret, ptr) - - #define EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int32, 32, type, ret, ptr) - - - #if defined(EA_PROCESSOR_ARM32) - - - #define EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr) \ - { \ - __int64 loadRet64; \ - EASTL_ARCH_ATOMIC_ARM32_LDREXD(loadRet64, EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(__int64, (ptr))); \ - \ - ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, loadRet64); \ - } - - #else - - #define EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_N(__int64, 64, type, ret, ptr) - - #endif - - - /** - * NOTE: - * - * The ARM documentation states the following: - * A 64-bit pair requires the address to be quadword aligned and is single-copy atomic for each doubleword at doubleword granularity - * - * Thus we must ensure the store succeeds in order for the load to be observed as atomic. - * Therefore we must use the full cmpxchg in order to do a proper atomic load.
- */ - #define EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, MemoryOrder) \ - { \ - bool cmpxchgRetBool; \ - ret = *(ptr); \ - do \ - { \ - EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_CMPXCHG_STRONG_, MemoryOrder), _128)(type, cmpxchgRetBool, \ - ptr, &(ret), ret); \ - } while (!cmpxchgRetBool); \ - } - - - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_8(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr) - - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_16(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr) - - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_32(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr) - - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_64(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr) - - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_128(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, RELAXED) - - - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_8(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr); \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_16(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr); \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_32(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr); \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_64(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr); \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_128(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, ACQUIRE) - - - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_8(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_8(type, ret, ptr); \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_16(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_16(type, ret, ptr); \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_32(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_32(type, ret, ptr); \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_64(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_64(type, ret, ptr); \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_128(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_ARM_LOAD_128(type, ret, ptr, SEQ_CST) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_LOAD_H */ diff --git a/include/EASTL/internal/atomic/arch/arm/arch_arm_memory_barrier.h b/include/EASTL/internal/atomic/arch/arm/arch_arm_memory_barrier.h deleted file mode 100644 index 44dc991..0000000 --- a/include/EASTL/internal/atomic/arch/arm/arch_arm_memory_barrier.h +++ /dev/null @@ -1,97 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_MEMORY_BARRIER_H -#define EASTL_ATOMIC_INTERNAL_ARCH_ARM_MEMORY_BARRIER_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -#if defined(EA_COMPILER_MSVC) && !defined(EA_COMPILER_CLANG_CL) - - #if defined(EA_PROCESSOR_ARM32) - - #define EASTL_ARM_DMB_ISH _ARM_BARRIER_ISH - - #define EASTL_ARM_DMB_ISHST _ARM_BARRIER_ISHST - - #define EASTL_ARM_DMB_ISHLD _ARM_BARRIER_ISH - - #elif defined(EA_PROCESSOR_ARM64) - - #define EASTL_ARM_DMB_ISH _ARM64_BARRIER_ISH - - #define EASTL_ARM_DMB_ISHST _ARM64_BARRIER_ISHST - - #define EASTL_ARM_DMB_ISHLD _ARM64_BARRIER_ISHLD - - #endif - - - /** - * NOTE: - * - * While it makes no sense for a hardware memory barrier not to imply a compiler barrier, - * the MSVC docs do not explicitly state that, so it is better to be safe than sorry and avoid - * chasing down hard-to-find bugs caused by the compiler deciding to reorder things. - */ - - #define EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(option) \ - EASTL_ATOMIC_COMPILER_BARRIER(); \ - __dmb(option); \ - EASTL_ATOMIC_COMPILER_BARRIER() - - -#elif defined(EA_COMPILER_GNUC) || defined(__clang__) - - #define EASTL_ARM_DMB_ISH ish - - #define EASTL_ARM_DMB_ISHST ishst - - #if defined(EA_PROCESSOR_ARM32) - - #define EASTL_ARM_DMB_ISHLD ish - - #elif defined(EA_PROCESSOR_ARM64) - - #define EASTL_ARM_DMB_ISHLD ishld - - #endif - - - #define EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(option) \ - __asm__ __volatile__ ("dmb " EA_STRINGIFY(option) ::: "memory") - - -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CPU_MB() -// -#define EASTL_ARCH_ATOMIC_CPU_MB() \ - EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(EASTL_ARM_DMB_ISH) - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CPU_WMB() -// -#define EASTL_ARCH_ATOMIC_CPU_WMB() \ - EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(EASTL_ARM_DMB_ISHST) - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CPU_RMB() -// -#define EASTL_ARCH_ATOMIC_CPU_RMB() \ - EASTL_ARCH_ATOMIC_ARM_EMIT_DMB(EASTL_ARM_DMB_ISHLD) - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_MEMORY_BARRIER_H */ diff --git a/include/EASTL/internal/atomic/arch/arm/arch_arm_store.h b/include/EASTL/internal/atomic/arch/arm/arch_arm_store.h deleted file mode 100644 index ab53b9d..0000000 --- a/include/EASTL/internal/atomic/arch/arm/arch_arm_store.h +++ /dev/null @@ -1,142 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved.
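On the GCC/Clang path above, EASTL_ARCH_ATOMIC_ARM_EMIT_DMB and the CPU_MB/WMB/RMB macros reduce to a dmb instruction wrapped in a compiler-barrier clobber. A minimal standalone sketch of that shape (hypothetical helper names, assuming an ARM target and a GCC-compatible compiler; not the EASTL macros themselves):

// Hypothetical free-function equivalents of EASTL_ARCH_ATOMIC_CPU_MB/WMB/RMB on GCC/Clang ARM.
inline void cpu_mb()  { __asm__ __volatile__("dmb ish"   ::: "memory"); } // full barrier
inline void cpu_wmb() { __asm__ __volatile__("dmb ishst" ::: "memory"); } // store-store barrier
inline void cpu_rmb() { __asm__ __volatile__("dmb ishld" ::: "memory"); } // load barrier; ARM32 would fall back to plain dmb ish

The "memory" clobber provides the compiler-barrier half of the contract, mirroring the explicit EASTL_ATOMIC_COMPILER_BARRIER() calls on the MSVC path.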
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_STORE_H -#define EASTL_ATOMIC_INTERNAL_ARCH_ARM_STORE_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_STORE_*_N(type, type * ptr, type val) -// -#if defined(EA_COMPILER_MSVC) - - - #define EASTL_ARCH_ATOMIC_ARM_STORE_N(integralType, bits, type, ptr, val) \ - EA_PREPROCESSOR_JOIN(__iso_volatile_store, bits)(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), EASTL_ATOMIC_TYPE_PUN_CAST(integralType, (val))) - - - #define EASTL_ARCH_ATOMIC_ARM_STORE_8(type, ptr, val) \ - EASTL_ARCH_ATOMIC_ARM_STORE_N(__int8, 8, type, ptr, val) - - #define EASTL_ARCH_ATOMIC_ARM_STORE_16(type, ptr, val) \ - EASTL_ARCH_ATOMIC_ARM_STORE_N(__int16, 16, type, ptr, val) - - #define EASTL_ARCH_ATOMIC_ARM_STORE_32(type, ptr, val) \ - EASTL_ARCH_ATOMIC_ARM_STORE_N(__int32, 32, type, ptr, val) - - - #if defined(EA_PROCESSOR_ARM64) - - #define EASTL_ARCH_ATOMIC_ARM_STORE_64(type, ptr, val) \ - EASTL_ARCH_ATOMIC_ARM_STORE_N(__int64, 64, type, ptr, val) - - #endif - - - #define EASTL_ARCH_ATOMIC_ARM_STORE_128(type, ptr, val, MemoryOrder) \ - { \ - type exchange128; EA_UNUSED(exchange128); \ - EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_EXCHANGE_, MemoryOrder), _128)(type, exchange128, ptr, val); \ - } - - - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_8(type, ptr, val) \ - EASTL_ARCH_ATOMIC_ARM_STORE_8(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_16(type, ptr, val) \ - EASTL_ARCH_ATOMIC_ARM_STORE_16(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_32(type, ptr, val) \ - EASTL_ARCH_ATOMIC_ARM_STORE_32(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_128(type, ptr, val) \ - EASTL_ARCH_ATOMIC_ARM_STORE_128(type, ptr, val, RELAXED) - - - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_8(type, ptr, val) \ - EASTL_ATOMIC_CPU_MB(); \ - EASTL_ARCH_ATOMIC_ARM_STORE_8(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_16(type, ptr, val) \ - EASTL_ATOMIC_CPU_MB(); \ - EASTL_ARCH_ATOMIC_ARM_STORE_16(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_32(type, ptr, val) \ - EASTL_ATOMIC_CPU_MB(); \ - EASTL_ARCH_ATOMIC_ARM_STORE_32(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_128(type, ptr, val) \ - EASTL_ARCH_ATOMIC_ARM_STORE_128(type, ptr, val, RELEASE) - - - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_8(type, ptr, val) \ - EASTL_ATOMIC_CPU_MB(); \ - EASTL_ARCH_ATOMIC_ARM_STORE_8(type, ptr, val) ; \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_16(type, ptr, val) \ - EASTL_ATOMIC_CPU_MB(); \ - EASTL_ARCH_ATOMIC_ARM_STORE_16(type, ptr, val); \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_32(type, ptr, val) \ - EASTL_ATOMIC_CPU_MB(); \ - EASTL_ARCH_ATOMIC_ARM_STORE_32(type, ptr, val); \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128(type, ptr, val) \ - EASTL_ARCH_ATOMIC_ARM_STORE_128(type, ptr, val, SEQ_CST) - - - #if defined(EA_PROCESSOR_ARM32) - - - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_64(type, ptr, val) \ - { \ - type retExchange64; EA_UNUSED(retExchange64); \ - EASTL_ATOMIC_EXCHANGE_RELAXED_64(type, retExchange64, ptr, val); \ - } - - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_64(type, ptr, val) \ - { \ - type retExchange64; EA_UNUSED(retExchange64); \ - EASTL_ATOMIC_EXCHANGE_RELEASE_64(type, retExchange64, ptr, val); \ - } - - #define 
EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64(type, ptr, val) \ - { \ - type retExchange64; EA_UNUSED(retExchange64); \ - EASTL_ATOMIC_EXCHANGE_SEQ_CST_64(type, retExchange64, ptr, val); \ - } - - - #elif defined(EA_PROCESSOR_ARM64) - - - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_64(type, ptr, val) \ - EASTL_ARCH_ATOMIC_ARM_STORE_64(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_64(type, ptr, val) \ - EASTL_ATOMIC_CPU_MB(); \ - EASTL_ARCH_ATOMIC_ARM_STORE_64(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64(type, ptr, val) \ - EASTL_ATOMIC_CPU_MB(); \ - EASTL_ARCH_ATOMIC_ARM_STORE_64(type, ptr, val); \ - EASTL_ATOMIC_CPU_MB() - - - #endif - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_STORE_H */ diff --git a/include/EASTL/internal/atomic/arch/arm/arch_arm_thread_fence.h b/include/EASTL/internal/atomic/arch/arm/arch_arm_thread_fence.h deleted file mode 100644 index 391c64e..0000000 --- a/include/EASTL/internal/atomic/arch/arm/arch_arm_thread_fence.h +++ /dev/null @@ -1,37 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_ARM_THREAD_FENCE_H -#define EASTL_ATOMIC_INTERNAL_ARCH_ARM_THREAD_FENCE_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_THREAD_FENCE_*() -// -#if defined(EA_COMPILER_MSVC) - - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELAXED() - - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQUIRE() \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELEASE() \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQ_REL() \ - EASTL_ATOMIC_CPU_MB() - - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_SEQ_CST() \ - EASTL_ATOMIC_CPU_MB() - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_ARM_THREAD_FENCE_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86.h b/include/EASTL/internal/atomic/arch/x86/arch_x86.h deleted file mode 100644 index 77c383a..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86.h +++ /dev/null @@ -1,158 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
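The ARM64 store macros above express the 'trailing sync;' mapping by bracketing a plain store with full barriers: a release store issues dmb ish before the store, and a seq_cst store adds another dmb ish after it. A rough standalone sketch of that shape for a 32-bit value (hypothetical function names, reusing the cpu_mb() helper sketched earlier; not the EASTL implementation):

#include <cstdint>

// Release store: prior memory operations may not be reordered past the store.
inline void store_release_32(volatile int32_t* ptr, int32_t val)
{
    cpu_mb();   // dmb ish
    *ptr = val; // plain str
}

// Seq_Cst store: additionally orders the store against later memory operations.
inline void store_seq_cst_32(volatile int32_t* ptr, int32_t val)
{
    cpu_mb();   // dmb ish
    *ptr = val; // plain str
    cpu_mb();   // dmb ish
}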
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -/** - * x86 && x64 Mappings - * - * Load Relaxed : MOV - * Load Acquire : MOV; COMPILER_BARRIER; - * Load Seq_Cst : MOV; COMPILER_BARRIER; - * - * Store Relaxed : MOV - * Store Release : COMPILER_BARRIER; MOV; - * Store Seq_Cst : LOCK XCHG : MOV; MFENCE; - * - * Relaxed Fence : - * Acquire Fence : COMPILER_BARRIER - * Release Fence : COMPILER_BARRIER - * Acq_Rel Fence : COMPILER_BARRIER - * Seq_Cst Fence : MFENCE - */ - - -///////////////////////////////////////////////////////////////////////////////// - -#if (defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64) - #define EASTL_ARCH_ATOMIC_HAS_128BIT -#elif defined(EA_COMPILER_MSVC) - #if EA_PLATFORM_PTR_SIZE == 8 - #define EASTL_ARCH_ATOMIC_HAS_128BIT - #endif -#endif - -///////////////////////////////////////////////////////////////////////////////// - - -/** - * NOTE: - * - * On 32-bit x86 CPUs, Intel Pentium and newer, AMD K5 and newer, - * and any i586 class x86 CPU support only a 64-bit cmpxchg, - * known as cmpxchg8b. - * - * On this class of cpus we can guarantee that 64-bit loads/stores are - * also atomic by using the SSE2 movq, SSE1 movlps, or x87 fild/fstp instructions. - * - * We support all other atomic operations - * on compilers that only provide this 64-bit cmpxchg instruction - * by wrapping them around the 64-bit cmpxchg8b instruction. - */ -#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86) - - - #define EASTL_ARCH_ATOMIC_X86_NOP_PRE_COMPUTE_DESIRED(ret, observed, val) \ - static_assert(false, "EASTL_ARCH_ATOMIC_X86_NOP_PRE_COMPUTE_DESIRED() must be implemented!"); - - #define EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET(ret, prevObserved, val) - - - #define EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, MemoryOrder, PRE_COMPUTE_DESIRED, POST_COMPUTE_RET) \ - { \ - EASTL_ATOMIC_DEFAULT_INIT(bool, cmpxchgRet); \ - EASTL_ATOMIC_LOAD_RELAXED_64(type, ret, ptr); \ - do \ - { \ - type computedDesired; \ - PRE_COMPUTE_DESIRED(computedDesired, ret, (val)); \ - EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_CMPXCHG_STRONG_, MemoryOrder), _64)(type, cmpxchgRet, ptr, &(ret), computedDesired); \ - } while (!cmpxchgRet); \ - POST_COMPUTE_RET(ret, ret, (val)); \ - } - - -#endif - - -/** - * NOTE: - * - * 64-bit x64 CPUs support only a 128-bit cmpxchg, known as cmpxchg16b. - * - * We support all other atomic operations by wrapping them around - * the 128-bit cmpxchg16b instruction. - * - * 128-bit loads are only atomic when using the cmpxchg16b instruction. - * SSE 128-bit loads are not guaranteed to be atomic, even though some CPUs, - * such as AMD Ryzen or Intel SandyBridge, make them atomic. - */ -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_NOP_PRE_COMPUTE_DESIRED(ret, observed, val) \ - static_assert(false, "EASTL_ARCH_ATOMIC_X86_NOP_PRE_COMPUTE_DESIRED() must be implemented!"); - - #define EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET(ret, prevObserved, val) - - - #define EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, MemoryOrder, PRE_COMPUTE_DESIRED, POST_COMPUTE_RET) \ - { \ - EASTL_ATOMIC_DEFAULT_INIT(bool, cmpxchgRet); \ - /* This is intentionally a non-atomic 128-bit load which may observe shearing.
*/ \ - /* Either we do not observe *(ptr) but then the cmpxchg will fail and the observed */ \ - /* atomic load will be returned. Or the non-atomic load got lucky and the cmpxchg succeeds */ \ - /* because the observed value equals the value in *(ptr) thus we optimistically do a non-atomic load. */ \ - ret = *(ptr); \ - do \ - { \ - type computedDesired; \ - PRE_COMPUTE_DESIRED(computedDesired, ret, (val)); \ - EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_CMPXCHG_STRONG_, MemoryOrder), _128)(type, cmpxchgRet, ptr, &(ret), computedDesired); \ - } while (!cmpxchgRet); \ - POST_COMPUTE_RET(ret, ret, (val)); \ - } - - -#endif - - -///////////////////////////////////////////////////////////////////////////////// - - -#include "arch_x86_fetch_add.h" -#include "arch_x86_fetch_sub.h" - -#include "arch_x86_fetch_and.h" -#include "arch_x86_fetch_xor.h" -#include "arch_x86_fetch_or.h" - -#include "arch_x86_add_fetch.h" -#include "arch_x86_sub_fetch.h" - -#include "arch_x86_and_fetch.h" -#include "arch_x86_xor_fetch.h" -#include "arch_x86_or_fetch.h" - -#include "arch_x86_exchange.h" - -#include "arch_x86_cmpxchg_weak.h" -#include "arch_x86_cmpxchg_strong.h" - -#include "arch_x86_memory_barrier.h" - -#include "arch_x86_thread_fence.h" - -#include "arch_x86_load.h" -#include "arch_x86_store.h" - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_add_fetch.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_add_fetch.h deleted file mode 100644 index 7b77528..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_add_fetch.h +++ /dev/null @@ -1,96 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
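EASTL_ARCH_ATOMIC_X86_OP_64_IMPL and the 128-bit variant above are the one pattern behind every read-modify-write on these fallback paths: observe the current value, compute the desired value, and retry a strong compare-exchange until it succeeds. A rough portable-C++ analogue of that loop, shown for add_fetch (uses std::atomic rather than the EASTL macros; illustrative only):

#include <atomic>
#include <cstdint>

// Sketch of the cmpxchg retry loop the *_OP_*_IMPL macros expand to, shown for add_fetch.
inline uint64_t add_fetch_sketch(std::atomic<uint64_t>& obj, uint64_t val)
{
    uint64_t observed = obj.load(std::memory_order_relaxed); // initial observation (the "ret" in the macro)
    uint64_t desired  = observed + val;                      // PRE_COMPUTE_DESIRED
    while (!obj.compare_exchange_strong(observed, desired))  // on failure, observed is refreshed from memory
    {
        desired = observed + val;                            // recompute from the new observation
    }
    return desired;                                          // add_fetch returns the new value (POST_COMPUTE_RET)
}

fetch_add, and_fetch, fetch_xor, and the rest differ only in how the desired value is computed and in whether the old or the new value is returned.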
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_ADD_FETCH_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_ADD_FETCH_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_ADD_FETCH_*_N(type, type ret, type * ptr, type val) -// -#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86) - - - #define EASTL_ARCH_ATOMIC_X86_ADD_FETCH_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) + (val)) - - #define EASTL_ARCH_ATOMIC_X86_ADD_FETCH_POST_COMPUTE_RET(ret, prevObserved, val) \ - ret = ((prevObserved) + (val)) - - - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_POST_COMPUTE_RET) - - -#endif - - -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_ADD_FETCH_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) + (val)) - - #define EASTL_ARCH_ATOMIC_X86_ADD_FETCH_POST_COMPUTE_RET(ret, prevObserved, val) \ - ret = ((prevObserved) + (val)) - - - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELAXED_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQUIRE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_ADD_FETCH_RELEASE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_ADD_FETCH_ACQ_REL_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_ADD_FETCH_SEQ_CST_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_ADD_FETCH_PRE_COMPUTE_DESIRED, \ - 
EASTL_ARCH_ATOMIC_X86_ADD_FETCH_POST_COMPUTE_RET) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_ADD_FETCH_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_and_fetch.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_and_fetch.h deleted file mode 100644 index 0583163..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_and_fetch.h +++ /dev/null @@ -1,96 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_AND_FETCH_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_AND_FETCH_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_AND_FETCH_*_N(type, type ret, type * ptr, type val) -// -#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86) - - - #define EASTL_ARCH_ATOMIC_X86_AND_FETCH_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) & (val)) - - #define EASTL_ARCH_ATOMIC_X86_AND_FETCH_POST_COMPUTE_RET(ret, prevObserved, val) \ - ret = ((prevObserved) & (val)) - - - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_POST_COMPUTE_RET) - - -#endif - - -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_AND_FETCH_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) & (val)) - - #define EASTL_ARCH_ATOMIC_X86_AND_FETCH_POST_COMPUTE_RET(ret, prevObserved, val) \ - ret = ((prevObserved) & (val)) - - - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELAXED_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQUIRE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_AND_FETCH_RELEASE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \ - 
EASTL_ARCH_ATOMIC_X86_AND_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_AND_FETCH_ACQ_REL_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_AND_FETCH_SEQ_CST_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_AND_FETCH_POST_COMPUTE_RET) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_AND_FETCH_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_cmpxchg_strong.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_cmpxchg_strong.h deleted file mode 100644 index 1968e9a..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_cmpxchg_strong.h +++ /dev/null @@ -1,69 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_CMPXCHG_STRONG_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_CMPXCHG_STRONG_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_*_*_N(type, bool ret, type * ptr, type * expected, type desired) -// -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired) \ - { \ - /* Compare RDX:RAX with m128. If equal, set ZF and load RCX:RBX into m128. Else, clear ZF and load m128 into RDX:RAX. 
*/ \ - __asm__ __volatile__ ("lock; cmpxchg16b %2\n" /* cmpxchg16b sets/clears ZF */ \ - "sete %3" /* If ZF == 1, set the return value to 1 */ \ - /* Output Operands */ \ - : "=a"((EASTL_ATOMIC_TYPE_CAST(uint64_t, (expected)))[0]), "=d"((EASTL_ATOMIC_TYPE_CAST(uint64_t, (expected)))[1]), \ - "+m"(*(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(__uint128_t, (ptr)))), \ - "=rm"((ret)) \ - /* Input Operands */ \ - : "b"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(desired)))[0]), "c"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(desired)))[1]), \ - "a"((EASTL_ATOMIC_TYPE_CAST(uint64_t, (expected)))[0]), "d"((EASTL_ATOMIC_TYPE_CAST(uint64_t, (expected)))[1]) \ - /* Clobbers */ \ - : "memory", "cc"); \ - } - - - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_128(type, ret, ptr, expected, desired) \ - EASTL_ARCH_ATOMIC_X86_CMPXCHG_STRONG_128_IMPL(type, ret, ptr, expected, desired) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_CMPXCHG_STRONG_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_cmpxchg_weak.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_cmpxchg_weak.h deleted file mode 100644 index 61a126c..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_cmpxchg_weak.h +++ /dev/null @@ -1,52 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
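Every memory-order variant above maps to the same lock cmpxchg16b sequence, which is reasonable because a lock-prefixed instruction already acts as a full barrier on x86-64; it also explains why the weak variants in the next header simply alias the strong ones. For comparison only, a hedged sketch of a 16-byte strong compare-exchange using the GCC/Clang __atomic builtins (assumes __int128 support; whether it lowers to an inline cmpxchg16b or a libatomic call depends on -mcx16 and the compiler version; an analogue, not the EASTL code):

// Returns true and stores desired on success; otherwise refreshes *expected with the current value.
inline bool cas128_sketch(unsigned __int128* ptr, unsigned __int128* expected, unsigned __int128 desired)
{
    return __atomic_compare_exchange_n(ptr, expected, desired,
                                       false /*strong*/, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}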
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_CMPXCHG_WEAK_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_CMPXCHG_WEAK_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_*_*_N(type, bool ret, type * ptr, type * expected, type desired) -// -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELAXED_RELAXED_128(type, ret, ptr, expected, desired) \ - EASTL_ATOMIC_CMPXCHG_STRONG_RELAXED_RELAXED_128(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_RELAXED_128(type, ret, ptr, expected, desired) \ - EASTL_ATOMIC_CMPXCHG_STRONG_ACQUIRE_RELAXED_128(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQUIRE_ACQUIRE_128(type, ret, ptr, expected, desired) \ - EASTL_ATOMIC_CMPXCHG_STRONG_ACQUIRE_ACQUIRE_128(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_RELEASE_RELAXED_128(type, ret, ptr, expected, desired) \ - EASTL_ATOMIC_CMPXCHG_STRONG_RELEASE_RELAXED_128(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_RELAXED_128(type, ret, ptr, expected, desired) \ - EASTL_ATOMIC_CMPXCHG_STRONG_ACQ_REL_RELAXED_128(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_ACQ_REL_ACQUIRE_128(type, ret, ptr, expected, desired) \ - EASTL_ATOMIC_CMPXCHG_STRONG_ACQ_REL_ACQUIRE_128(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_RELAXED_128(type, ret, ptr, expected, desired) \ - EASTL_ATOMIC_CMPXCHG_STRONG_SEQ_CST_RELAXED_128(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_ACQUIRE_128(type, ret, ptr, expected, desired) \ - EASTL_ATOMIC_CMPXCHG_STRONG_SEQ_CST_ACQUIRE_128(type, ret, ptr, expected, desired) - - #define EASTL_ARCH_ATOMIC_CMPXCHG_WEAK_SEQ_CST_SEQ_CST_128(type, ret, ptr, expected, desired) \ - EASTL_ATOMIC_CMPXCHG_STRONG_SEQ_CST_SEQ_CST_128(type, ret, ptr, expected, desired) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_CMPXCHG_WEAK_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_exchange.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_exchange.h deleted file mode 100644 index b1de7d8..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_exchange.h +++ /dev/null @@ -1,91 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_EXCHANGE_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_EXCHANGE_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_EXCHANGE_*_N(type, type ret, type * ptr, type val) -// -#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86) - - - #define EASTL_ARCH_ATOMIC_X86_EXCHANGE_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = (val) - - - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_EXCHANGE_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_EXCHANGE_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_EXCHANGE_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_EXCHANGE_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_EXCHANGE_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - -#endif - - -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_EXCHANGE_128(type, ret, ptr, val, MemoryOrder) \ - { \ - EASTL_ATOMIC_DEFAULT_INIT(bool, cmpxchgRet); \ - /* This is intentionally a non-atomic 128-bit load which may observe shearing. */ \ - /* Either we do not observe *(ptr) but then the cmpxchg will fail and the observed */ \ - /* atomic load will be returned. Or the non-atomic load got lucky and the cmpxchg succeeds */ \ - /* because the observed value equals the value in *(ptr) thus we optimistically do a non-atomic load. 
*/ \ - ret = *(ptr); \ - do \ - { \ - EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_CMPXCHG_STRONG_, MemoryOrder), _128)(type, cmpxchgRet, ptr, &(ret), val); \ - } while (!cmpxchgRet); \ - } - - - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELAXED_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_EXCHANGE_128(type, ret, ptr, val, RELAXED) - - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQUIRE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_EXCHANGE_128(type, ret, ptr, val, ACQUIRE) - - #define EASTL_ARCH_ATOMIC_EXCHANGE_RELEASE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_EXCHANGE_128(type, ret, ptr, val, RELEASE) - - #define EASTL_ARCH_ATOMIC_EXCHANGE_ACQ_REL_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_EXCHANGE_128(type, ret, ptr, val, ACQ_REL) - - #define EASTL_ARCH_ATOMIC_EXCHANGE_SEQ_CST_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_EXCHANGE_128(type, ret, ptr, val, SEQ_CST) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_EXCHANGE_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_add.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_add.h deleted file mode 100644 index e816af9..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_add.h +++ /dev/null @@ -1,90 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_ADD_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_ADD_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_FETCH_ADD_*_N(type, type ret, type * ptr, type val) -// -#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86) - - - #define EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) + (val)) - - - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - -#endif - - -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) + (val)) - - - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELAXED_128(type, ret, ptr, val) \ - 
EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQUIRE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_ADD_RELEASE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_ADD_ACQ_REL_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_ADD_SEQ_CST_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_FETCH_ADD_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_ADD_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_and.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_and.h deleted file mode 100644 index ff27b1a..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_and.h +++ /dev/null @@ -1,90 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_AND_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_AND_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_FETCH_AND_*_N(type, type ret, type * ptr, type val) -// -#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86) - - - #define EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) & (val)) - - - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - -#endif - - -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && 
defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) & (val)) - - - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELAXED_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQUIRE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_AND_RELEASE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_AND_ACQ_REL_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_AND_SEQ_CST_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_FETCH_AND_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_AND_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_or.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_or.h deleted file mode 100644 index 8627d3a..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_or.h +++ /dev/null @@ -1,90 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_OR_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_OR_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_FETCH_OR_*_N(type, type ret, type * ptr, type val) -// -#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86) - - - #define EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) | (val)) - - - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - -#endif - - -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) | (val)) - - - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELAXED_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQUIRE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_OR_RELEASE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_OR_ACQ_REL_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_OR_SEQ_CST_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_FETCH_OR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_OR_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_sub.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_sub.h deleted file mode 100644 index 14b43f9..0000000 --- 
a/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_sub.h +++ /dev/null @@ -1,90 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_SUB_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_SUB_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_FETCH_SUB_*_N(type, type ret, type * ptr, type val) -// -#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86) - - - #define EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) - (val)) - - - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - -#endif - - -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) - (val)) - - - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELAXED_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQUIRE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_SUB_RELEASE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_SUB_ACQ_REL_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_SUB_SEQ_CST_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_FETCH_SUB_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - -#endif - - -#endif /* 
EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_SUB_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_xor.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_xor.h deleted file mode 100644 index 666df8b..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_fetch_xor.h +++ /dev/null @@ -1,90 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_XOR_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_XOR_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_FETCH_XOR_*_N(type, type ret, type * ptr, type val) -// -#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86) - - - #define EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) ^ (val)) - - - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - -#endif - - -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) ^ (val)) - - - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELAXED_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQUIRE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_XOR_RELEASE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_FETCH_XOR_ACQ_REL_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - #define 
EASTL_ARCH_ATOMIC_FETCH_XOR_SEQ_CST_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_FETCH_XOR_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_NOP_POST_COMPUTE_RET) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_FETCH_XOR_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_load.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_load.h deleted file mode 100644 index 644a2a1..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_load.h +++ /dev/null @@ -1,164 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_LOAD_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_LOAD_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_LOAD_*_N(type, type ret, type * ptr) -// - -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - /** - * NOTE: - * - * Since the cmpxchg 128-bit inline assembly does a sete in the asm to set the return boolean, - * it doesn't get dead-store removed even though we don't care about the success of the - * cmpxchg since the compiler cannot reason about what is inside asm blocks. - * Thus this variant just does the minimum required to do an atomic load. - */ -#define EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, MemoryOrder) \ - { \ - EASTL_ATOMIC_FIXED_WIDTH_TYPE_128 expected = 0; \ - ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, expected); \ - \ - /* Compare RDX:RAX with m128. If equal, set ZF and load RCX:RBX into m128. Else, clear ZF and load m128 into RDX:RAX. 
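
A minimal standalone sketch of the load-via-cmpxchg16b trick the NOTE above relies on, assuming GCC/Clang on x86-64 built with -mcx16; the helper name and the use of the __sync builtin are illustrative assumptions, not part of this header, which uses hand-written inline assembly instead.

#include <cstdint>

// Illustrative only: an atomic 128-bit load expressed as a compare-exchange
// that never logically changes the stored value. On x86-64 with -mcx16 the
// __sync builtin below compiles to lock cmpxchg16b.
static inline __uint128_t atomic_load_128(__uint128_t* ptr)
{
    // Compare *ptr with 0 and, only if equal, store 0 back; either way the
    // builtin returns the value it observed, which is exactly the atomic load.
    return __sync_val_compare_and_swap(ptr, (__uint128_t)0, (__uint128_t)0);
}
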
*/ \ - __asm__ __volatile__ ("lock; cmpxchg16b %2" /* cmpxchg16b sets/clears ZF */ \ - /* Output Operands */ \ - : "=a"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(ret)))[0]), "=d"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(ret)))[1]), \ - "+m"(*(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(__uint128_t, (ptr)))) \ - /* Input Operands */ \ - : "b"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(ret)))[0]), "c"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(ret)))[1]), \ - "a"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(ret)))[0]), "d"((EASTL_ATOMIC_TYPE_CAST(uint64_t, &(ret)))[1]) \ - /* Clobbers */ \ - : "memory", "cc"); \ - } - - -#define EASTL_ARCH_ATOMIC_LOAD_RELAXED_128(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, RELAXED) - -#define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_128(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, ACQUIRE) - -#define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_128(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, SEQ_CST) - -#elif defined(EA_COMPILER_MSVC) - - - #if defined(EA_COMPILER_MSVC) && (EA_COMPILER_VERSION >= 1920) // >= VS2019 - - #define EASTL_ARCH_ATOMIC_X86_LOAD_N(integralType, bits, type, ret, ptr) \ - { \ - integralType retIntegral; \ - retIntegral = EA_PREPROCESSOR_JOIN(__iso_volatile_load, bits)(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr))); \ - \ - ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \ - } - - #else - - #define EASTL_ARCH_ATOMIC_X86_LOAD_N(integralType, bits, type, ret, ptr) \ - { \ - integralType retIntegral; \ - retIntegral = (*(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)))); \ - \ - ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, retIntegral); \ - } - - #endif - - - #define EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, MemoryOrder) \ - { \ - EASTL_ATOMIC_FIXED_WIDTH_TYPE_128 expected{0, 0}; \ - ret = EASTL_ATOMIC_TYPE_PUN_CAST(type, expected); \ - \ - bool cmpxchgRetBool; EA_UNUSED(cmpxchgRetBool); \ - EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_CMPXCHG_STRONG_, MemoryOrder), _128)(type, cmpxchgRetBool, ptr, &(ret), ret); \ - } - - - #define EASTL_ARCH_ATOMIC_X86_LOAD_8(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_N(__int8, 8, type, ret, ptr) - - #define EASTL_ARCH_ATOMIC_X86_LOAD_16(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_N(__int16, 16, type, ret, ptr) - - #define EASTL_ARCH_ATOMIC_X86_LOAD_32(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_N(__int32, 32, type, ret, ptr) - - #define EASTL_ARCH_ATOMIC_X86_LOAD_64(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_N(__int64, 64, type, ret, ptr) - - - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_8(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_8(type, ret, ptr) - - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_16(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_16(type, ret, ptr) - - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_32(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_32(type, ret, ptr) - - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_64(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_64(type, ret, ptr) - - #define EASTL_ARCH_ATOMIC_LOAD_RELAXED_128(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, RELAXED) - - - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_8(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_8(type, ret, ptr); \ - EASTL_ATOMIC_COMPILER_BARRIER() - - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_16(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_16(type, ret, ptr); \ - EASTL_ATOMIC_COMPILER_BARRIER() - - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_32(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_32(type, ret, ptr); \ - EASTL_ATOMIC_COMPILER_BARRIER() - - #define 
EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_64(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_64(type, ret, ptr); \ - EASTL_ATOMIC_COMPILER_BARRIER() - - #define EASTL_ARCH_ATOMIC_LOAD_ACQUIRE_128(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, ACQUIRE) - - - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_8(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_8(type, ret, ptr); \ - EASTL_ATOMIC_COMPILER_BARRIER() - - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_16(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_16(type, ret, ptr); \ - EASTL_ATOMIC_COMPILER_BARRIER() - - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_32(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_32(type, ret, ptr); \ - EASTL_ATOMIC_COMPILER_BARRIER() - - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_64(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_64(type, ret, ptr); \ - EASTL_ATOMIC_COMPILER_BARRIER() - - #define EASTL_ARCH_ATOMIC_LOAD_SEQ_CST_128(type, ret, ptr) \ - EASTL_ARCH_ATOMIC_X86_LOAD_128(type, ret, ptr, SEQ_CST) - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_LOAD_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_memory_barrier.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_memory_barrier.h deleted file mode 100644 index 7bad141..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_memory_barrier.h +++ /dev/null @@ -1,104 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_MEMORY_BARRIER_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_MEMORY_BARRIER_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CPU_MB() -// -#if defined(EA_COMPILER_MSVC) - - /** - * NOTE: - * While it makes no sense for a hardware memory barrier to not imply a compiler barrier. - * MSVC docs do not explicitly state that, so better to be safe than sorry chasing down - * hard to find bugs due to the compiler deciding to reorder things. - */ - - #if 1 - - // 4459 : declaration of 'identifier' hides global declaration - // 4456 : declaration of 'identifier' hides previous local declaration - #define EASTL_ARCH_ATOMIC_CPU_MB() \ - { \ - EA_DISABLE_VC_WARNING(4459 4456); \ - volatile long _; \ - _InterlockedExchangeAdd(&_, 0); \ - EA_RESTORE_VC_WARNING(); \ - } - - #else - - #define EASTL_ARCH_ATOMIC_CPU_MB() \ - EASTL_ATOMIC_COMPILER_BARRIER(); \ - _mm_mfence(); \ - EASTL_ATOMIC_COMPILER_BARRIER() - - #endif - -#elif defined(__clang__) || defined(EA_COMPILER_GNUC) - - /** - * NOTE: - * - * mfence orders all loads/stores to/from all memory types. - * We only care about ordinary cacheable memory so lighter weight locked instruction - * is far faster than a mfence to get a full memory barrier. - * lock; addl against the top of the stack is good because: - * distinct for every thread so prevents false sharing - * that cacheline is most likely cache hot - * - * We intentionally do it below the stack pointer to avoid false RAW register dependencies, - * in cases where the compiler reads from the stack pointer after the lock; addl instruction - * - * Accounting for Red Zones or Cachelines doesn't provide extra benefit. 
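
To make the NOTE above concrete, here is a hedged standalone sketch of the two full-barrier strategies it compares, for GCC/Clang only; the function names are illustrative, not part of this header.

// Full memory barrier via a locked RMW just below the thread's own stack
// pointer, as the NOTE prefers: the lock prefix orders all ordinary loads and
// stores, and the target location is private to the thread (no false sharing)
// and almost certainly cache hot.
static inline void cpu_mb_locked_add()
{
#if defined(__x86_64__)
    __asm__ __volatile__("lock; addl $0, -8(%%rsp)" ::: "memory", "cc");
#else
    __asm__ __volatile__("lock; addl $0, -4(%%esp)" ::: "memory", "cc");
#endif
}

// Heavier alternative: mfence additionally orders non-temporal and
// write-combining accesses, which ordinary cacheable atomics do not need.
static inline void cpu_mb_mfence()
{
    __asm__ __volatile__("mfence" ::: "memory");
}
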
- */ - - #if defined(EA_PROCESSOR_X86) - - #define EASTL_ARCH_ATOMIC_CPU_MB() \ - __asm__ __volatile__ ("lock; addl $0, -4(%%esp)" ::: "memory", "cc") - - #elif defined(EA_PROCESSOR_X86_64) - - #define EASTL_ARCH_ATOMIC_CPU_MB() \ - __asm__ __volatile__ ("lock; addl $0, -8(%%rsp)" ::: "memory", "cc") - - #else - - #define EASTL_ARCH_ATOMIC_CPU_MB() \ - __asm__ __volatile__ ("mfence" ::: "memory") - - #endif - - -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CPU_WMB() -// -#define EASTL_ARCH_ATOMIC_CPU_WMB() \ - EASTL_ATOMIC_COMPILER_BARRIER() - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_CPU_RMB() -// -#define EASTL_ARCH_ATOMIC_CPU_RMB() \ - EASTL_ATOMIC_COMPILER_BARRIER() - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_MEMORY_BARRIER_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_or_fetch.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_or_fetch.h deleted file mode 100644 index 42f7d61..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_or_fetch.h +++ /dev/null @@ -1,96 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_OR_FETCH_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_OR_FETCH_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_OR_FETCH_*_N(type, type ret, type * ptr, type val) -// -#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86) - - - #define EASTL_ARCH_ATOMIC_X86_OR_FETCH_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) | (val)) - - #define EASTL_ARCH_ATOMIC_X86_OR_FETCH_POST_COMPUTE_RET(ret, prevObserved, val) \ - ret = ((prevObserved) | (val)) - - - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_POST_COMPUTE_RET) - - -#endif - - -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_OR_FETCH_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) | (val)) - - #define 
EASTL_ARCH_ATOMIC_X86_OR_FETCH_POST_COMPUTE_RET(ret, prevObserved, val) \ - ret = ((prevObserved) | (val)) - - - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELAXED_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQUIRE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_OR_FETCH_RELEASE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_OR_FETCH_ACQ_REL_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_OR_FETCH_SEQ_CST_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_OR_FETCH_POST_COMPUTE_RET) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_OR_FETCH_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_store.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_store.h deleted file mode 100644 index 31655c3..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_store.h +++ /dev/null @@ -1,171 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
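
The or-fetch macros above hand their PRE_COMPUTE_DESIRED / POST_COMPUTE_RET hooks to EASTL_ARCH_ATOMIC_X86_OP_64_IMPL, whose definition is not part of this diff. The following is only a guess at the general shape of such a compare-exchange loop, written against MSVC's _InterlockedCompareExchange64 (lock cmpxchg8b on 32-bit x86); the function name and structure are assumptions for illustration.

#include <intrin.h>

// Hypothetical sketch of a 64-bit or_fetch on 32-bit MSVC: retry a cmpxchg8b
// until the value computed from the observed value is installed.
static inline __int64 or_fetch_64(__int64 volatile* ptr, __int64 val)
{
    __int64 observed = *ptr; // seed read; a torn read only costs one extra retry
    for (;;)
    {
        __int64 desired = observed | val;                            // PRE_COMPUTE_DESIRED
        __int64 prev = _InterlockedCompareExchange64(ptr, desired, observed);
        if (prev == observed)
            return prev | val;                                       // POST_COMPUTE_RET (new value)
        observed = prev;                                             // lost the race; retry
    }
}
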
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_STORE_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_STORE_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_STORE_*_N(type, type * ptr, type val) -// -#if defined(EA_COMPILER_MSVC) - - - #if defined(EA_COMPILER_MSVC) && (EA_COMPILER_VERSION >= 1920) // >= VS2019 - - #define EASTL_ARCH_ATOMIC_X86_STORE_N(integralType, bits, type, ptr, val) \ - EA_PREPROCESSOR_JOIN(__iso_volatile_store, bits)(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)), EASTL_ATOMIC_TYPE_PUN_CAST(integralType, (val))) - - #else - - #define EASTL_ARCH_ATOMIC_X86_STORE_N(integralType, bits, type, ptr, val) \ - { \ - integralType valIntegral = EASTL_ATOMIC_TYPE_PUN_CAST(integralType, (val)); \ - \ - (*(EASTL_ATOMIC_VOLATILE_INTEGRAL_CAST(integralType, (ptr)))) = valIntegral; \ - } - - #endif - - - #define EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, MemoryOrder) \ - { \ - type exchange128; EA_UNUSED(exchange128); \ - EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_EXCHANGE_, MemoryOrder), _128)(type, exchange128, ptr, val); \ - } - - - #define EASTL_ARCH_ATOMIC_X86_STORE_8(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_N(__int8, 8, type, ptr, val) - - #define EASTL_ARCH_ATOMIC_X86_STORE_16(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_N(__int16, 16, type, ptr, val) - - #define EASTL_ARCH_ATOMIC_X86_STORE_32(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_N(__int32, 32, type, ptr, val) - - #define EASTL_ARCH_ATOMIC_X86_STORE_64(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_N(__int64, 64, type, ptr, val) - - - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_8(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_8(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_16(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_16(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_32(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_32(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_64(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_64(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_128(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, RELAXED) - - - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_8(type, ptr, val) \ - EASTL_ATOMIC_COMPILER_BARRIER(); \ - EASTL_ARCH_ATOMIC_X86_STORE_8(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_16(type, ptr, val) \ - EASTL_ATOMIC_COMPILER_BARRIER(); \ - EASTL_ARCH_ATOMIC_X86_STORE_16(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_32(type, ptr, val) \ - EASTL_ATOMIC_COMPILER_BARRIER(); \ - EASTL_ARCH_ATOMIC_X86_STORE_32(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_64(type, ptr, val) \ - EASTL_ATOMIC_COMPILER_BARRIER(); \ - EASTL_ARCH_ATOMIC_X86_STORE_64(type, ptr, val) - - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_128(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, RELEASE) - - - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_8(type, ptr, val) \ - { \ - type exchange8; EA_UNUSED(exchange8); \ - EASTL_ATOMIC_EXCHANGE_SEQ_CST_8(type, exchange8, ptr, val); \ - } - - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_16(type, ptr, val) \ - { \ - type exchange16; EA_UNUSED(exchange16); \ - EASTL_ATOMIC_EXCHANGE_SEQ_CST_16(type, exchange16, ptr, val); \ - } - - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_32(type, ptr, val) \ - { \ - type exchange32; 
EA_UNUSED(exchange32); \ - EASTL_ATOMIC_EXCHANGE_SEQ_CST_32(type, exchange32, ptr, val); \ - } - - - /** - * NOTE: - * - * Since 64-bit exchange is wrapped around a cmpxchg8b on 32-bit x86, it is - * faster to just do a mov; mfence. - */ - #if defined(EA_PROCESSOR_X86) - - - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64(type, ptr, val) \ - EASTL_ATOMIC_COMPILER_BARRIER(); \ - EASTL_ARCH_ATOMIC_X86_STORE_64(type, ptr, val); \ - EASTL_ATOMIC_CPU_MB() - - - #elif defined(EA_PROCESSOR_X86_64) - - - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_64(type, ptr, val) \ - { \ - type exchange64; EA_UNUSED(exchange64); \ - EASTL_ATOMIC_EXCHANGE_SEQ_CST_64(type, exchange64, ptr, val); \ - } - - - #endif - - - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, SEQ_CST) - - -#endif - - -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, MemoryOrder) \ - { \ - type exchange128; EA_UNUSED(exchange128); \ - EA_PREPROCESSOR_JOIN(EA_PREPROCESSOR_JOIN(EASTL_ATOMIC_EXCHANGE_, MemoryOrder), _128)(type, exchange128, ptr, val); \ - } - - - #define EASTL_ARCH_ATOMIC_STORE_RELAXED_128(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, RELAXED) - - #define EASTL_ARCH_ATOMIC_STORE_RELEASE_128(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, RELEASE) - - #define EASTL_ARCH_ATOMIC_STORE_SEQ_CST_128(type, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_STORE_128(type, ptr, val, SEQ_CST) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_STORE_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_sub_fetch.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_sub_fetch.h deleted file mode 100644 index a1d0932..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_sub_fetch.h +++ /dev/null @@ -1,96 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
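
The store header above implements sequentially consistent stores either as an exchange or, per its NOTE, as a plain store followed by a full barrier when the exchange would have to be emulated (64-bit data on 32-bit x86). A hedged sketch of the two strategies for GCC/Clang, using 32-bit values to sidestep the question of how to make the plain store itself single-instruction atomic; the helper names are illustrative assumptions.

#include <cstdint>
#include <emmintrin.h> // _mm_mfence

// Strategy 1: store as an exchange. xchg with a memory operand carries an
// implicit lock prefix, so it is already a full barrier.
static inline void store_seq_cst_via_xchg(uint32_t* ptr, uint32_t val)
{
    __asm__ __volatile__("xchgl %0, %1"
                         : "+r"(val), "+m"(*ptr)
                         :
                         : "memory");
}

// Strategy 2: ordinary store followed by a full fence. x86 stores already have
// release semantics; the fence promotes the store to seq_cst. This is the
// cheaper option where an exchange would need a cmpxchg loop.
static inline void store_seq_cst_via_mfence(uint32_t* ptr, uint32_t val)
{
    *(volatile uint32_t*)ptr = val;
    _mm_mfence();
}
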
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_SUB_FETCH_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_SUB_FETCH_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_SUB_FETCH_*_N(type, type ret, type * ptr, type val) -// -#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86) - - - #define EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) - (val)) - - #define EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET(ret, prevObserved, val) \ - ret = ((prevObserved) - (val)) - - - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET) - - -#endif - - -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) - (val)) - - #define EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET(ret, prevObserved, val) \ - ret = ((prevObserved) - (val)) - - - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELAXED_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQUIRE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_SUB_FETCH_RELEASE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_SUB_FETCH_ACQ_REL_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_SUB_FETCH_SEQ_CST_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_SUB_FETCH_PRE_COMPUTE_DESIRED, \ - 
EASTL_ARCH_ATOMIC_X86_SUB_FETCH_POST_COMPUTE_RET) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_SUB_FETCH_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_thread_fence.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_thread_fence.h deleted file mode 100644 index 183c7f3..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_thread_fence.h +++ /dev/null @@ -1,42 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. -///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_THREAD_FENCE_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_THREAD_FENCE_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_THREAD_FENCE_*() -// -#if defined(EA_COMPILER_MSVC) - - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELAXED() - - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQUIRE() \ - EASTL_ATOMIC_COMPILER_BARRIER() - - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_RELEASE() \ - EASTL_ATOMIC_COMPILER_BARRIER() - - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_ACQ_REL() \ - EASTL_ATOMIC_COMPILER_BARRIER() - -#endif - - -#if defined(EA_COMPILER_MSVC) || defined(__clang__) || defined(EA_COMPILER_GNUC) - - #define EASTL_ARCH_ATOMIC_THREAD_FENCE_SEQ_CST() \ - EASTL_ATOMIC_CPU_MB() - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_THREAD_FENCE_H */ diff --git a/include/EASTL/internal/atomic/arch/x86/arch_x86_xor_fetch.h b/include/EASTL/internal/atomic/arch/x86/arch_x86_xor_fetch.h deleted file mode 100644 index a5b62c3..0000000 --- a/include/EASTL/internal/atomic/arch/x86/arch_x86_xor_fetch.h +++ /dev/null @@ -1,96 +0,0 @@ -///////////////////////////////////////////////////////////////////////////////// -// Copyright (c) Electronic Arts Inc. All rights reserved. 
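
The thread-fence header above maps every ordering except seq_cst to a pure compiler barrier, because x86's strong memory model already provides acquire/release ordering for ordinary loads and stores. A hedged GCC/Clang-only sketch of that mapping (mfence is used here for simplicity; the memory-barrier header above prefers a locked instruction):

// Acquire, release and acq_rel fences on x86 only need to stop the compiler
// from reordering memory accesses.
static inline void thread_fence_acq_rel()
{
    __asm__ __volatile__("" ::: "memory"); // compiler barrier only
}

// Seq_cst additionally needs a real CPU barrier.
static inline void thread_fence_seq_cst()
{
    __asm__ __volatile__("mfence" ::: "memory");
}
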
-///////////////////////////////////////////////////////////////////////////////// - - -#ifndef EASTL_ATOMIC_INTERNAL_ARCH_X86_XOR_FETCH_H -#define EASTL_ATOMIC_INTERNAL_ARCH_X86_XOR_FETCH_H - -#if defined(EA_PRAGMA_ONCE_SUPPORTED) - #pragma once -#endif - - -///////////////////////////////////////////////////////////////////////////////// -// -// void EASTL_ARCH_ATOMIC_XOR_FETCH_*_N(type, type ret, type * ptr, type val) -// -#if defined(EA_COMPILER_MSVC) && defined(EA_PROCESSOR_X86) - - - #define EASTL_ARCH_ATOMIC_X86_XOR_FETCH_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) ^ (val)) - - #define EASTL_ARCH_ATOMIC_X86_XOR_FETCH_POST_COMPUTE_RET(ret, prevObserved, val) \ - ret = ((prevObserved) ^ (val)) - - - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_64(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_64_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_POST_COMPUTE_RET) - - -#endif - - -#if ((defined(__clang__) || defined(EA_COMPILER_GNUC)) && defined(EA_PROCESSOR_X86_64)) - - - #define EASTL_ARCH_ATOMIC_X86_XOR_FETCH_PRE_COMPUTE_DESIRED(ret, observed, val) \ - ret = ((observed) ^ (val)) - - #define EASTL_ARCH_ATOMIC_X86_XOR_FETCH_POST_COMPUTE_RET(ret, prevObserved, val) \ - ret = ((prevObserved) ^ (val)) - - - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELAXED_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELAXED, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQUIRE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQUIRE, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_XOR_FETCH_RELEASE_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, RELEASE, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_XOR_FETCH_ACQ_REL_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, ACQ_REL, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_PRE_COMPUTE_DESIRED, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_POST_COMPUTE_RET) - - #define EASTL_ARCH_ATOMIC_XOR_FETCH_SEQ_CST_128(type, ret, ptr, val) \ - EASTL_ARCH_ATOMIC_X86_OP_128_IMPL(type, ret, ptr, val, SEQ_CST, \ - EASTL_ARCH_ATOMIC_X86_XOR_FETCH_PRE_COMPUTE_DESIRED, \ - 
EASTL_ARCH_ATOMIC_X86_XOR_FETCH_POST_COMPUTE_RET) - - -#endif - - -#endif /* EASTL_ATOMIC_INTERNAL_ARCH_X86_XOR_FETCH_H */
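
Taken together, the deleted fetch_xor and xor_fetch headers differ only in which value the operation hands back: fetch_xor uses a no-op post-compute and returns the previously observed value, while xor_fetch applies the operation once more to that observed value. A small hedged sketch of the distinction using GCC/Clang __atomic builtins; the function names are illustrative, not from these headers.

#include <cstdint>

// Returns the value held before the xor (matches the fetch_xor headers).
static inline uint64_t fetch_xor_64(uint64_t* ptr, uint64_t val)
{
    return __atomic_fetch_xor(ptr, val, __ATOMIC_SEQ_CST);
}

// Returns the value held after the xor (matches the xor_fetch headers):
// the old value with the operation applied once more, i.e. POST_COMPUTE_RET.
static inline uint64_t xor_fetch_64(uint64_t* ptr, uint64_t val)
{
    uint64_t old = __atomic_fetch_xor(ptr, val, __ATOMIC_SEQ_CST);
    return old ^ val;
}
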