diff --git a/include/EASTL/internal/config.h b/include/EASTL/internal/config.h
index 6c13117b..03677dcf 100644
--- a/include/EASTL/internal/config.h
+++ b/include/EASTL/internal/config.h
@@ -1836,4 +1836,18 @@ typedef EASTL_SSIZE_T eastl_ssize_t; // Signed version of eastl_size_t. Concept
 		EA_RESTORE_VC_WARNING(); \
 		EA_RESTORE_GCC_WARNING();
 
+#if defined(__has_feature)
+	#if __has_feature(thread_sanitizer)
+		#define EASTL_TSAN_ENABLED 1
+	#else
+		#define EASTL_TSAN_ENABLED 0
+	#endif
+#else
+	#if defined(__SANITIZE_THREAD__)
+		#define EASTL_TSAN_ENABLED 1
+	#else
+		#define EASTL_TSAN_ENABLED 0
+	#endif
+#endif
+
 #endif // Header include guard
diff --git a/include/EASTL/shared_ptr.h b/include/EASTL/shared_ptr.h
index 9e37a091..4b87fad7 100644
--- a/include/EASTL/shared_ptr.h
+++ b/include/EASTL/shared_ptr.h
@@ -162,12 +162,19 @@ namespace eastl
 	inline void ref_count_sp::release()
 	{
 		EASTL_ASSERT((mRefCount.load(memory_order_relaxed) > 0));
-		if(mRefCount.fetch_sub(1, memory_order_release) == 1)
-		{
-			atomic_thread_fence(memory_order_acquire);
-			free_value();
-		}
-
+		#if !EASTL_TSAN_ENABLED
+			if(mRefCount.fetch_sub(1, memory_order_release) == 1)
+			{
+				atomic_thread_fence(memory_order_acquire);
+				free_value();
+			}
+		#else
+			// NOTE: TSAN does not support atomic_thread_fences
+			if(mRefCount.fetch_sub(1, memory_order_acq_rel) == 1)
+			{
+				free_value();
+			}
+		#endif
 		weak_release();
 	}
 
@@ -179,11 +186,18 @@ namespace eastl
 	inline void ref_count_sp::weak_release()
 	{
 		EASTL_ASSERT(mWeakRefCount.load(memory_order_relaxed) > 0);
-		if(mWeakRefCount.fetch_sub(1, memory_order_release) == 1)
-		{
-			atomic_thread_fence(memory_order_acquire);
-			free_ref_count_sp();
-		}
+		#if !EASTL_TSAN_ENABLED
+			if(mWeakRefCount.fetch_sub(1, memory_order_release) == 1)
+			{
+				atomic_thread_fence(memory_order_acquire);
+				free_ref_count_sp();
+			}
+		#else
+			if(mWeakRefCount.fetch_sub(1, memory_order_acq_rel) == 1)
+			{
+				free_ref_count_sp();
+			}
+		#endif
 	}
 
 	inline ref_count_sp* ref_count_sp::lock() EA_NOEXCEPT
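
The change above guards the fence-based release idiom behind a new EASTL_TSAN_ENABLED macro: Clang exposes __has_feature(thread_sanitizer), while GCC defines __SANITIZE_THREAD__ when building with -fsanitize=thread. The two branches are semantically equivalent. In the normal build, the non-final decrement needs only release ordering, and the acquire fence runs just once, on the thread that performs the final decrement, before the object is freed. ThreadSanitizer does not model atomic_thread_fence, so under TSAN the same happens-before edge is obtained by strengthening the decrement itself to memory_order_acq_rel, at the cost of acquire ordering on every decrement rather than only the final one.

Below is a minimal standalone sketch (not part of the diff) contrasting the two idioms. RefCounted, destroy, and the DEMO_TSAN toggle are hypothetical names for illustration; only the memory-order pattern is taken from the patch.

// refcount_demo.cpp -- hypothetical illustration of the two release idioms.
#include <atomic>
#include <cstdio>
#include <thread>
#include <vector>

struct RefCounted
{
	std::atomic<int> mRefCount{1}; // starts with one owning reference

	void add_ref() { mRefCount.fetch_add(1, std::memory_order_relaxed); }

	void release()
	{
	#if !defined(DEMO_TSAN) // stands in for !EASTL_TSAN_ENABLED
		// Fence-based idiom: every decrement is release-only; the lone
		// acquire fence on the final decrement synchronizes with all
		// earlier decrements before the object is destroyed.
		if(mRefCount.fetch_sub(1, std::memory_order_release) == 1)
		{
			std::atomic_thread_fence(std::memory_order_acquire);
			destroy();
		}
	#else
		// TSAN-friendly idiom: acq_rel on the RMW itself provides the
		// same happens-before edge without a standalone fence, which
		// ThreadSanitizer does not model.
		if(mRefCount.fetch_sub(1, std::memory_order_acq_rel) == 1)
		{
			destroy();
		}
	#endif
	}

	void destroy()
	{
		std::puts("last reference released");
		delete this;
	}
};

int main()
{
	RefCounted* p = new RefCounted;
	std::vector<std::thread> threads;
	for(int i = 0; i < 4; ++i)
	{
		p->add_ref();
		threads.emplace_back([p] { p->release(); });
	}
	for(auto& t : threads)
		t.join();
	p->release(); // drops the initial reference; exactly one path destroys
}

Built with -fsanitize=thread and -DDEMO_TSAN, the acq_rel branch keeps TSAN from reporting a false positive on the destroy. Without TSAN, the fence branch is kept because the release-only decrement can be cheaper on weakly ordered architectures, where the common non-final decrement avoids acquire ordering entirely.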