Index: compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp =================================================================== --- compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp +++ compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp @@ -402,11 +402,12 @@ template static bool AtomicCAS(ThreadState *thr, uptr pc, volatile T *a, T *c, T v, morder mo, morder fmo) { - (void)fmo; // Unused because llvm does not pass it yet. MemoryWriteAtomic(thr, pc, (uptr)a, SizeLog()); SyncVar *s = 0; bool write_lock = mo != mo_acquire && mo != mo_consume; - if (mo != mo_relaxed) { + bool should_lock = mo != mo_relaxed || fmo != mo_relaxed; + + if (should_lock) { s = ctx->metamap.GetOrCreateAndLock(thr, pc, (uptr)a, write_lock); thr->fast_state.IncrementEpoch(); // Can't increment epoch w/o writing to the trace as well. @@ -420,15 +421,35 @@ } T cc = *c; T pr = func_cas(a, cc, v); - if (s) { + if (pr == cc) { + if (!s) + return true; if (write_lock) s->mtx.Unlock(); else s->mtx.ReadUnlock(); - } - if (pr == cc) return true; + } + + // Honor failure memory order. *c = pr; + CHECK(IsLoadOrder(fmo)); + if (!s) + return false; + + // 31.7.2.18: "The failure argument shall not be memory_order_release + // nor memory_order_acq_rel". LLVM (2021-05) fallbacks to Monotonic (relaxed) + // when those are used. 
+ if (IsAcquireOrder(fmo)) + AcquireImpl(thr, pc, &s->clock); + + if (s) { + if (write_lock) + s->mtx.Unlock(); + else + s->mtx.ReadUnlock(); + } + return false; } Index: compiler-rt/test/tsan/compare_exchange.cpp =================================================================== --- /dev/null +++ compiler-rt/test/tsan/compare_exchange.cpp @@ -0,0 +1,99 @@ +// RUN: %clangxx_tsan -O1 %s %link_libcxx_tsan -DRELEASE_ACQUIRE -o %t && %run %t 2>&1 | FileCheck %s +// RUN: %clangxx_tsan -O1 %s %link_libcxx_tsan -DRELEASE_RELAXED -o %t && %deflake %run %t 2>&1 | FileCheck --check-prefix=CHECK-REPORT %s +// RUN: %clangxx_tsan -O1 %s %link_libcxx_tsan -DRELACQ_ACQUIRE -o %t && %run %t 2>&1 | FileCheck %s +// RUN: %clangxx_tsan -O1 %s %link_libcxx_tsan -DRELACQ_RELAXED -o %t && %run %t 2>&1 | FileCheck %s +// RUN: %clangxx_tsan -O1 %s %link_libcxx_tsan -DRELEASE_CONSUME -o %t && %deflake %run %t 2>&1 | FileCheck --check-prefix=CHECK-REPORT %s +// RUN: %clangxx_tsan -O1 %s %link_libcxx_tsan -DSEQ_CST_RELAX -o %t && %run %t 2>&1 | FileCheck %s +// RUN: %clangxx_tsan -O1 %s %link_libcxx_tsan -DSEQ_CST_ACQUIRE -o %t && %run %t 2>&1 | FileCheck %s +// RUN: %clangxx_tsan -O1 %s %link_libcxx_tsan -DSEQ_CST_CONSUME -o %t && %run %t 2>&1 | FileCheck %s +// RUN: %clangxx_tsan -O1 %s %link_libcxx_tsan -DRELAXED_ACQUIRE -o %t && %deflake %run %t 2>&1 | FileCheck --check-prefix=CHECK-REPORT %s +#include <atomic> +#include <cassert> +#include <cstdio> +#include <thread> + +#ifdef RELEASE_ACQUIRE +static constexpr auto SuccessOrder = std::memory_order_release; +static constexpr auto FailureOrder = std::memory_order_acquire; +#endif + +#ifdef RELEASE_RELAXED +static constexpr auto SuccessOrder = std::memory_order_release; +static constexpr auto FailureOrder = std::memory_order_relaxed; +#endif + +#ifdef RELACQ_ACQUIRE +static constexpr auto SuccessOrder = std::memory_order_acq_rel; +static constexpr auto FailureOrder = std::memory_order_acquire; +#endif + +#ifdef RELACQ_RELAXED +static constexpr auto SuccessOrder = 
std::memory_order_acq_rel; +static constexpr auto FailureOrder = std::memory_order_relaxed; +#endif + +#ifdef RELEASE_CONSUME +static constexpr auto SuccessOrder = std::memory_order_release; +static constexpr auto FailureOrder = std::memory_order_consume; +#endif + +#ifdef SEQ_CST_RELAX +static constexpr auto SuccessOrder = std::memory_order_seq_cst; +static constexpr auto FailureOrder = std::memory_order_relaxed; +#endif + +#ifdef SEQ_CST_ACQUIRE +static constexpr auto SuccessOrder = std::memory_order_seq_cst; +static constexpr auto FailureOrder = std::memory_order_acquire; +#endif + +#ifdef SEQ_CST_CONSUME +static constexpr auto SuccessOrder = std::memory_order_seq_cst; +static constexpr auto FailureOrder = std::memory_order_consume; +#endif + +#ifdef RELAXED_ACQUIRE +static constexpr auto SuccessOrder = std::memory_order_relaxed; +static constexpr auto FailureOrder = std::memory_order_acquire; +#endif + +// FIXME: both RELEASE_CONSUME and RELAXED_ACQUIRE currently give +// false positives due to LLVM (2021-05) falling back to other modes in +// pre C++17 rules. Fix this test once they get fixed. 
+struct node { + int val; +}; +std::atomic<node *> _node{nullptr}; + +void f1() { + auto n = new node(); + n->val = 42; + _node.store(n, std::memory_order_release); +} + +void f2() { + node *expected = nullptr; + while (expected == nullptr) { + _node.compare_exchange_weak(expected, nullptr, SuccessOrder, + FailureOrder); + }; + + ++expected->val; + assert(expected->val == 43); +} + +int main() { + std::thread t1(f1); + std::thread t2(f2); + + t1.join(); + t2.join(); + + fprintf(stderr, "DONE\n"); + return 0; +} + +// CHECK-NOT: WARNING: ThreadSanitizer: data race +// CHECK: DONE + +// CHECK-REPORT: WARNING: ThreadSanitizer: data race \ No newline at end of file Index: llvm/test/Instrumentation/ThreadSanitizer/atomic.ll =================================================================== --- llvm/test/Instrumentation/ThreadSanitizer/atomic.ll +++ llvm/test/Instrumentation/ThreadSanitizer/atomic.ll @@ -370,6 +370,14 @@ ; CHECK-LABEL: atomic8_cas_release ; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 3, i32 0), !dbg +define void @atomic8_cas_rel_acq(i8* %a) nounwind uwtable { +entry: + cmpxchg i8* %a, i8 0, i8 1 release acquire, !dbg !7 + ret void, !dbg !7 +} +; CHECK-LABEL: atomic8_cas_rel_acq +; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 3, i32 2), !dbg + define void @atomic8_cas_acq_rel(i8* %a) nounwind uwtable { entry: cmpxchg i8* %a, i8 0, i8 1 acq_rel acquire, !dbg !7