diff --git a/llvm/lib/Transforms/InstCombine/InstructionCombining.cpp b/llvm/lib/Transforms/InstCombine/InstructionCombining.cpp --- a/llvm/lib/Transforms/InstCombine/InstructionCombining.cpp +++ b/llvm/lib/Transforms/InstCombine/InstructionCombining.cpp @@ -2888,14 +2888,6 @@ // Otherwise, this instruction can be freely erased, // even if it is not side-effect free. - // Temporarily disable removal of volatile stores preceding unreachable, - // pending a potential LangRef change permitting volatile stores to trap. - // TODO: Either remove this code, or properly integrate the check into - // isGuaranteedToTransferExecutionToSuccessor(). - if (auto *SI = dyn_cast<StoreInst>(Prev)) - if (SI->isVolatile()) - return nullptr; // Can not drop this instruction. We're done here. - // A value may still have uses before we process it here (for example, in // another unreachable block), so convert those to poison. replaceInstUsesWith(*Prev, PoisonValue::get(Prev->getType())); diff --git a/llvm/lib/Transforms/Utils/Local.cpp b/llvm/lib/Transforms/Utils/Local.cpp --- a/llvm/lib/Transforms/Utils/Local.cpp +++ b/llvm/lib/Transforms/Utils/Local.cpp @@ -2297,9 +2297,6 @@ // that they should be changed to unreachable by passes that can't // modify the CFG. - // Don't touch volatile stores. - if (SI->isVolatile()) continue; - Value *Ptr = SI->getOperand(1); if (isa<UndefValue>(Ptr) || diff --git a/llvm/lib/Transforms/Utils/SimplifyCFG.cpp b/llvm/lib/Transforms/Utils/SimplifyCFG.cpp --- a/llvm/lib/Transforms/Utils/SimplifyCFG.cpp +++ b/llvm/lib/Transforms/Utils/SimplifyCFG.cpp @@ -4672,14 +4672,6 @@ // Otherwise, this instruction can be freely erased, // even if it is not side-effect free. - // Temporarily disable removal of volatile stores preceding unreachable, - // pending a potential LangRef change permitting volatile stores to trap. - // TODO: Either remove this code, or properly integrate the check into - // isGuaranteedToTransferExecutionToSuccessor(). 
- if (auto *SI = dyn_cast<StoreInst>(&*BBI)) - if (SI->isVolatile()) - break; // Can not drop this instruction. We're done here. - // Note that deleting EH's here is in fact okay, although it involves a bit // of subtle reasoning. If this inst is an EH, all the predecessors of this // block will be the unwind edges of Invoke/CatchSwitch/CleanupReturn, diff --git a/llvm/test/CodeGen/X86/indirect-branch-tracking-eh2.ll b/llvm/test/CodeGen/X86/indirect-branch-tracking-eh2.ll --- a/llvm/test/CodeGen/X86/indirect-branch-tracking-eh2.ll +++ b/llvm/test/CodeGen/X86/indirect-branch-tracking-eh2.ll @@ -3,51 +3,38 @@ ; NUM-COUNT-3: endbr64 -;SJLJ: main: # @main -;SJLJ-NEXT: .Lfunc_begin0: -;SJLJ-NEXT: # %bb.0: # %entry -;SJLJ-NEXT: endbr64 -;SJLJ-NEXT: pushq %rbp -;SJLJ: callq _Unwind_SjLj_Register -;SJLJ-NEXT: .Ltmp0: -;SJLJ-NEXT: callq _Z3foov -;SJLJ-NEXT: .Ltmp1: -;SJLJ-NEXT: # %bb.1: # %invoke.cont -;SJLJ-NEXT: movl -;SJLJ-NEXT: .LBB0_7: # %return -;SJLJ: callq _Unwind_SjLj_Unregister -;SJLJ: retq -;SJLJ-NEXT: .LBB0_9: -;SJLJ-NEXT: endbr64 -;SJLJ-NEXT: movl -;SJLJ-NEXT: cmpl -;SJLJ-NEXT: jb .LBB0_10 -;SJLJ-NEXT: # %bb.11: -;SJLJ-NEXT: ud2 -;SJLJ-NEXT: .LBB0_10: -;SJLJ-NEXT: leaq .LJTI0_0(%rip), %rcx -;SJLJ-NEXT: jmpq *(%rcx,%rax,8) -;SJLJ-NEXT: .LBB0_2: # %lpad -;SJLJ-NEXT: .Ltmp2: -;SJLJ-NEXT: endbr64 -;SJLJ: jne .LBB0_4 -;SJLJ-NEXT: # %bb.3: # %catch3 -;SJLJ: callq __cxa_begin_catch -;SJLJ: jmp .LBB0_6 -;SJLJ-NEXT: .LBB0_4: # %catch.fallthrough -;SJLJ-NEXT: cmpl -;SJLJ-NEXT: jne .LBB0_8 -;SJLJ-NEXT: # %bb.5: # %catch -;SJLJ: callq __cxa_begin_catch -;SJLJ: cmpb -;SJLJ-NEXT: .LBB0_6: # %return -;SJLJ: callq __cxa_end_catch -;SJLJ-NEXT: jmp .LBB0_7 -;SJLJ-NEXT: .LBB0_8: # %eh.resume -;SJLJ-NEXT: movl -;SJLJ-NEXT: .Lfunc_end0: -;SJLJ: .LJTI0_0: -;SJLJ-NEXT: .quad .LBB0_2 +; SJLJ-LABEL: main: +; SJLJ: # %bb.0: # %entry +; SJLJ-NEXT: endbr64 +; SJLJ: callq _Unwind_SjLj_Register@PLT +; SJLJ-NEXT: .Ltmp0: +; SJLJ-NEXT: callq _Z3foov +; SJLJ-NEXT: .Ltmp1: +; SJLJ-NEXT: # %bb.1: # 
%invoke.cont +; SJLJ: .LBB0_6: # %return +; SJLJ: callq _Unwind_SjLj_Unregister@PLT +; SJLJ: retq +; SJLJ-NEXT: .LBB0_7: +; SJLJ-NEXT: endbr64 +; SJLJ: jb .LBB0_8 +; SJLJ-NEXT: # %bb.9: +; SJLJ-NEXT: ud2 +; SJLJ-NEXT: .LBB0_8: +; SJLJ: jmpq *(%rcx,%rax,8) +; SJLJ-NEXT: .LBB0_2: # %lpad +; SJLJ-NEXT: .Ltmp2: +; SJLJ-NEXT: endbr64 +; SJLJ: jne .LBB0_4 +; SJLJ-NEXT: # %bb.3: # %catch3 +; SJLJ: callq __cxa_begin_catch +; SJLJ: jmp .LBB0_5 +; SJLJ-NEXT: .LBB0_4: # %catch +; SJLJ: callq __cxa_begin_catch +; SJLJ: cmpb $3, %al +; SJLJ-NEXT: .LBB0_5: # %return +; SJLJ-NEXT: setne %cl +; SJLJ: callq __cxa_end_catch +; SJLJ-NEXT: jmp .LBB0_6 @_ZTIi = external dso_local constant i8* @_ZTIc = external dso_local constant i8* diff --git a/llvm/test/Transforms/InstCombine/volatile_store.ll b/llvm/test/Transforms/InstCombine/volatile_store.ll --- a/llvm/test/Transforms/InstCombine/volatile_store.ll +++ b/llvm/test/Transforms/InstCombine/volatile_store.ll @@ -25,7 +25,6 @@ ; CHECK-LABEL: @volatile_store_before_unreachable( ; CHECK-NEXT: br i1 [[C:%.*]], label [[TRUE:%.*]], label [[FALSE:%.*]] ; CHECK: true: -; CHECK-NEXT: store volatile i8 0, i8* [[P:%.*]], align 1 ; CHECK-NEXT: unreachable ; CHECK: false: ; CHECK-NEXT: ret void diff --git a/llvm/test/Transforms/SimplifyCFG/trapping-load-unreachable.ll b/llvm/test/Transforms/SimplifyCFG/trapping-load-unreachable.ll --- a/llvm/test/Transforms/SimplifyCFG/trapping-load-unreachable.ll +++ b/llvm/test/Transforms/SimplifyCFG/trapping-load-unreachable.ll @@ -76,8 +76,8 @@ define void @test3() nounwind { ; CHECK-LABEL: @test3( ; CHECK-NEXT: entry: -; CHECK-NEXT: store volatile i32 4, i32* null, align 4 -; CHECK-NEXT: ret void +; CHECK-NEXT: call void @llvm.trap() +; CHECK-NEXT: unreachable ; entry: store volatile i32 4, i32* null @@ -101,11 +101,8 @@ define void @test4(i1 %C, i32* %P) { ; CHECK-LABEL: @test4( ; CHECK-NEXT: entry: -; CHECK-NEXT: br i1 [[C:%.*]], label [[T:%.*]], label [[F:%.*]] -; CHECK: T: -; CHECK-NEXT: store volatile 
i32 0, i32* [[P:%.*]], align 4 -; CHECK-NEXT: unreachable -; CHECK: F: +; CHECK-NEXT: [[TMP0:%.*]] = xor i1 [[C:%.*]], true +; CHECK-NEXT: call void @llvm.assume(i1 [[TMP0]]) ; CHECK-NEXT: ret void ; entry: