diff --git a/llvm/docs/LangRef.rst b/llvm/docs/LangRef.rst
--- a/llvm/docs/LangRef.rst
+++ b/llvm/docs/LangRef.rst
@@ -2867,6 +2867,12 @@
 so operations which modify memory or may have undefined behavior can be
 hoisted past a volatile operation.
 
+As an exception to the preceding rule, the compiler may not assume execution
+will continue after a volatile store operation. This restriction is necessary
+to support the somewhat common pattern in C of intentionally storing to an
+invalid pointer to crash the program. In the future, it might make sense to
+allow frontends to control this behavior.
+
 IR-level volatile loads and stores cannot safely be optimized into llvm.memcpy
 or llvm.memmove intrinsics even when those intrinsics are flagged volatile.
 Likewise, the backend should never split or merge target-legal volatile
diff --git a/llvm/lib/Analysis/ValueTracking.cpp b/llvm/lib/Analysis/ValueTracking.cpp
--- a/llvm/lib/Analysis/ValueTracking.cpp
+++ b/llvm/lib/Analysis/ValueTracking.cpp
@@ -5274,6 +5274,10 @@
   if (isa<UnreachableInst>(I))
     return false;
 
+  // Note: Do not add new checks here; instead, change Instruction::mayThrow or
+  // Instruction::willReturn.
+  //
+  // FIXME: Move this check into Instruction::willReturn.
   if (isa<CatchPadInst>(I)) {
     switch (classifyEHPersonality(I->getFunction()->getPersonalityFn())) {
     default:
diff --git a/llvm/lib/IR/Instruction.cpp b/llvm/lib/IR/Instruction.cpp
--- a/llvm/lib/IR/Instruction.cpp
+++ b/llvm/lib/IR/Instruction.cpp
@@ -673,6 +673,10 @@
 }
 
 bool Instruction::willReturn() const {
+  // Volatile store isn't guaranteed to return; see LangRef.
+  if (auto *SI = dyn_cast<StoreInst>(this))
+    return !SI->isVolatile();
+
   if (const auto *CB = dyn_cast<CallBase>(this))
     // FIXME: Temporarily assume that all side-effect free intrinsics will
     // return. Remove this workaround once all intrinsics are appropriately
diff --git a/llvm/lib/Transforms/InstCombine/InstructionCombining.cpp b/llvm/lib/Transforms/InstCombine/InstructionCombining.cpp
--- a/llvm/lib/Transforms/InstCombine/InstructionCombining.cpp
+++ b/llvm/lib/Transforms/InstCombine/InstructionCombining.cpp
@@ -2928,14 +2928,6 @@
     // Otherwise, this instruction can be freely erased,
     // even if it is not side-effect free.
 
-    // Temporarily disable removal of volatile stores preceding unreachable,
-    // pending a potential LangRef change permitting volatile stores to trap.
-    // TODO: Either remove this code, or properly integrate the check into
-    // isGuaranteedToTransferExecutionToSuccessor().
-    if (auto *SI = dyn_cast<StoreInst>(Prev))
-      if (SI->isVolatile())
-        return nullptr; // Can not drop this instruction. We're done here.
-
     // A value may still have uses before we process it here (for example, in
     // another unreachable block), so convert those to poison.
     replaceInstUsesWith(*Prev, PoisonValue::get(Prev->getType()));
diff --git a/llvm/lib/Transforms/Utils/SimplifyCFG.cpp b/llvm/lib/Transforms/Utils/SimplifyCFG.cpp
--- a/llvm/lib/Transforms/Utils/SimplifyCFG.cpp
+++ b/llvm/lib/Transforms/Utils/SimplifyCFG.cpp
@@ -4699,14 +4699,6 @@
     // Otherwise, this instruction can be freely erased,
     // even if it is not side-effect free.
 
-    // Temporarily disable removal of volatile stores preceding unreachable,
-    // pending a potential LangRef change permitting volatile stores to trap.
-    // TODO: Either remove this code, or properly integrate the check into
-    // isGuaranteedToTransferExecutionToSuccessor().
-    if (auto *SI = dyn_cast<StoreInst>(&*BBI))
-      if (SI->isVolatile())
-        break; // Can not drop this instruction. We're done here.
-
     // Note that deleting EH's here is in fact okay, although it involves a bit
     // of subtle reasoning. If this inst is an EH, all the predecessors of this
     // block will be the unwind edges of Invoke/CatchSwitch/CleanupReturn,
diff --git a/llvm/test/Transforms/FunctionAttrs/nosync.ll b/llvm/test/Transforms/FunctionAttrs/nosync.ll
--- a/llvm/test/Transforms/FunctionAttrs/nosync.ll
+++ b/llvm/test/Transforms/FunctionAttrs/nosync.ll
@@ -161,7 +161,7 @@
 ; negative, should not deduce nosync
 ; atomic load with release ordering
 define void @load_release(i32* nocapture %0) norecurse nounwind uwtable {
-; CHECK: Function Attrs: mustprogress nofree norecurse nounwind uwtable willreturn
+; CHECK: Function Attrs: nofree norecurse nounwind uwtable
 ; CHECK-LABEL: @load_release(
 ; CHECK-NEXT:    store atomic volatile i32 10, i32* [[TMP0:%.*]] release, align 4
 ; CHECK-NEXT:    ret void
@@ -172,7 +172,7 @@
 
 ; negative volatile, relaxed atomic
 define void @load_volatile_release(i32* nocapture %0) norecurse nounwind uwtable {
-; CHECK: Function Attrs: mustprogress nofree norecurse nounwind uwtable willreturn
+; CHECK: Function Attrs: nofree norecurse nounwind uwtable
 ; CHECK-LABEL: @load_volatile_release(
 ; CHECK-NEXT:    store atomic volatile i32 10, i32* [[TMP0:%.*]] release, align 4
 ; CHECK-NEXT:    ret void
@@ -183,7 +183,7 @@
 
 ; volatile store.
 define void @volatile_store(i32* %0) norecurse nounwind uwtable {
-; CHECK: Function Attrs: mustprogress nofree norecurse nounwind uwtable willreturn
+; CHECK: Function Attrs: nofree norecurse nounwind uwtable
 ; CHECK-LABEL: @volatile_store(
 ; CHECK-NEXT:    store volatile i32 14, i32* [[TMP0:%.*]], align 4
 ; CHECK-NEXT:    ret void
diff --git a/llvm/test/Transforms/LICM/sink-debuginfo-preserve.ll b/llvm/test/Transforms/LICM/sink-debuginfo-preserve.ll
--- a/llvm/test/Transforms/LICM/sink-debuginfo-preserve.ll
+++ b/llvm/test/Transforms/LICM/sink-debuginfo-preserve.ll
@@ -68,8 +68,6 @@
 
 bb17:                                             ; preds = %bb16, %bb13
   %i18 = load volatile i32, i32* @c, align 4, !dbg !57, !tbaa !40
-  %i19 = add nsw i32 %i18, 1, !dbg !57
-  store volatile i32 %i19, i32* @c, align 4, !dbg !57, !tbaa !40
   %i20 = load volatile i32, i32* @c, align 4, !dbg !46, !tbaa !40
   %i21 = icmp slt i32 %i20, 6, !dbg !47
   br i1 %i21, label %bb13, label %bb22, !dbg !48, !llvm.loop !58
diff --git a/llvm/test/Transforms/PhaseOrdering/inlining-alignment-assumptions.ll b/llvm/test/Transforms/PhaseOrdering/inlining-alignment-assumptions.ll
--- a/llvm/test/Transforms/PhaseOrdering/inlining-alignment-assumptions.ll
+++ b/llvm/test/Transforms/PhaseOrdering/inlining-alignment-assumptions.ll
@@ -34,7 +34,7 @@
 ; ASSUMPTIONS-ON-LABEL: @caller1(
 ; ASSUMPTIONS-ON-NEXT:    br i1 [[C:%.*]], label [[COMMON_RET:%.*]], label [[FALSE1:%.*]]
 ; ASSUMPTIONS-ON:       false1:
-; ASSUMPTIONS-ON-NEXT:    store volatile i64 1, i64* [[PTR:%.*]], align 8
+; ASSUMPTIONS-ON-NEXT:    store volatile i64 1, i64* [[PTR:%.*]], align 4
 ; ASSUMPTIONS-ON-NEXT:    br label [[COMMON_RET]]
 ; ASSUMPTIONS-ON:       common.ret:
 ; ASSUMPTIONS-ON-NEXT:    [[DOTSINK:%.*]] = phi i64 [ 3, [[FALSE1]] ], [ 2, [[TMP0:%.*]] ]
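For reference, below is a minimal C sketch (not part of the patch) of the idiom the new
LangRef paragraph describes: intentionally storing through an invalid pointer to crash
the program. The function name and the null address are illustrative only. Under the
revised semantics, LLVM keeps the volatile store because it may trap, and a function
containing one is no longer inferred willreturn, which is what the Instruction::willReturn
change and the updated FunctionAttrs tests above reflect.

    /* Hypothetical example of the C crash-on-purpose pattern. */
    void crash_now(void) {
      /* The volatile qualifier keeps the store from being deleted; with this
         patch, LLVM also stops assuming execution continues past it, so the
         store survives even when it is immediately followed by unreachable
         code. */
      *(volatile int *)0 = 0;
    }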