Index: lib/Analysis/MemoryDependenceAnalysis.cpp =================================================================== --- lib/Analysis/MemoryDependenceAnalysis.cpp +++ lib/Analysis/MemoryDependenceAnalysis.cpp @@ -409,9 +409,18 @@ // a load depends on another must aliased load from the same value. if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) { // Atomic loads have complications involved. + // A monotonic load is OK if the query inst is itself not atomic. // FIXME: This is overly conservative. - if (!LI->isUnordered()) - return MemDepResult::getClobber(LI); + if (!LI->isUnordered()) { + if (!QueryInst || LI->getOrdering() != Monotonic) + return MemDepResult::getClobber(LI); + if (auto *QueryLI = dyn_cast<LoadInst>(QueryInst)) + if (!QueryLI->isUnordered()) + return MemDepResult::getClobber(LI); + if (auto *QuerySI = dyn_cast<StoreInst>(QueryInst)) + if (!QuerySI->isUnordered()) + return MemDepResult::getClobber(LI); + } AliasAnalysis::Location LoadLoc = AA->getLocation(LI); @@ -469,9 +478,18 @@ if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) { // Atomic stores have complications involved. + // A monotonic store is OK if the query inst is itself not atomic. // FIXME: This is overly conservative. - if (!SI->isUnordered()) - return MemDepResult::getClobber(SI); + if (!SI->isUnordered()) { + if (!QueryInst || SI->getOrdering() != Monotonic) + return MemDepResult::getClobber(SI); + if (auto *QueryLI = dyn_cast<LoadInst>(QueryInst)) + if (!QueryLI->isUnordered()) + return MemDepResult::getClobber(SI); + if (auto *QuerySI = dyn_cast<StoreInst>(QueryInst)) + if (!QuerySI->isUnordered()) + return MemDepResult::getClobber(SI); + } // If alias analysis can tell that this store is guaranteed to not modify // the query pointer, ignore it. 
Use getModRefInfo to handle cases where Index: test/Transforms/DeadStoreElimination/atomic.ll =================================================================== --- test/Transforms/DeadStoreElimination/atomic.ll +++ test/Transforms/DeadStoreElimination/atomic.ll @@ -105,3 +105,50 @@ ret i32 %x } +; DSE across monotonic load (allowed as long as the eliminated store isUnordered) +define i32 @test9() nounwind uwtable ssp { +; CHECK: test9 +; CHECK-NOT: store i32 0 +; CHECK: store i32 1 +entry: + store i32 0, i32* @x + %x = load atomic i32* @y monotonic, align 4 + store i32 1, i32* @x + ret i32 %x +} + +; DSE across monotonic store (allowed as long as the eliminated store isUnordered) +define void @test10() nounwind uwtable ssp { +; CHECK: test10 +; CHECK-NOT: store i32 0 +; CHECK: store i32 1 +entry: + store i32 0, i32* @x + store atomic i32 42, i32* @y monotonic, align 4 + store i32 1, i32* @x + ret void +} + +; DSE across monotonic load (forbidden since the eliminated store is atomic) +define i32 @test11() nounwind uwtable ssp { +; CHECK: test11 +; CHECK: store atomic i32 0 +; CHECK: store atomic i32 1 +entry: + store atomic i32 0, i32* @x monotonic, align 4 + %x = load atomic i32* @y monotonic, align 4 + store atomic i32 1, i32* @x monotonic, align 4 + ret i32 %x +} + +; DSE across monotonic store (forbidden since the eliminated store is atomic) +define void @test12() nounwind uwtable ssp { +; CHECK: test12 +; CHECK: store atomic i32 0 +; CHECK: store atomic i32 1 +entry: + store atomic i32 0, i32* @x monotonic, align 4 + store atomic i32 42, i32* @y monotonic, align 4 + store atomic i32 1, i32* @x monotonic, align 4 + ret void +}