Index: include/llvm/Analysis/Utils/Local.h
===================================================================
--- include/llvm/Analysis/Utils/Local.h
+++ include/llvm/Analysis/Utils/Local.h
@@ -413,9 +413,17 @@
 /// Remove all blocks that can not be reached from the function's entry.
 ///
-/// Returns true if any basic block was removed.
+/// \param ExamineInstructions If true, reachable instructions are analyzed
+///                            and, where possible, rewritten into
+///                            `unreachable`. This can leave more blocks
+///                            unreachable than following the basic block
+///                            successor lists alone would.
+///
+/// \returns True if any basic block was removed, or if any instruction was
+///          rewritten because of `ExamineInstructions`.
 bool removeUnreachableBlocks(Function &F, LazyValueInfo *LVI = nullptr,
-                             DeferredDominance *DDT = nullptr);
+                             DeferredDominance *DDT = nullptr,
+                             bool ExamineInstructions = true);
 
 /// Combine the metadata of two instructions so that K can replace J
 ///
Index: lib/Transforms/Utils/Local.cpp
===================================================================
--- lib/Transforms/Utils/Local.cpp
+++ lib/Transforms/Utils/Local.cpp
@@ -1743,157 +1743,169 @@
   return Split;
 }
 
-static bool markAliveBlocks(Function &F,
-                            SmallPtrSetImpl<BasicBlock*> &Reachable,
-                            DeferredDominance *DDT = nullptr) {
-  SmallVector<BasicBlock*, 128> Worklist;
-  BasicBlock *BB = &F.front();
-  Worklist.push_back(BB);
-  Reachable.insert(BB);
+static bool rewriteUnreachableInstrs(Function &F,
+                                     BasicBlock *BB,
+                                     DeferredDominance *DDT = nullptr) {
   bool Changed = false;
-  do {
-    BB = Worklist.pop_back_val();
+  // Do a quick scan of the basic block, turning any obviously unreachable
+  // instructions into LLVM unreachable insts. The instruction combining pass
+  // canonicalizes unreachable insts into stores to null or undef.
+  for (Instruction &I : *BB) {
+    // Assumptions that are known to be false are equivalent to unreachable.
+    // Also, if the condition is undefined, then we make the choice most
+    // beneficial to the optimizer, and choose that to also be unreachable.
+    if (auto *II = dyn_cast<IntrinsicInst>(&I)) {
+      if (II->getIntrinsicID() == Intrinsic::assume) {
+        if (match(II->getArgOperand(0), m_CombineOr(m_Zero(), m_Undef()))) {
+          // Don't insert a call to llvm.trap right before the unreachable.
+          changeToUnreachable(II, false, false, DDT);
+          Changed = true;
+          break;
+        }
+      }
 
-    // Do a quick scan of the basic block, turning any obviously unreachable
-    // instructions into LLVM unreachable insts. The instruction combining pass
-    // canonicalizes unreachable insts into stores to null or undef.
-    for (Instruction &I : *BB) {
-      // Assumptions that are known to be false are equivalent to unreachable.
-      // Also, if the condition is undefined, then we make the choice most
-      // beneficial to the optimizer, and choose that to also be unreachable.
-      if (auto *II = dyn_cast<IntrinsicInst>(&I)) {
-        if (II->getIntrinsicID() == Intrinsic::assume) {
-          if (match(II->getArgOperand(0), m_CombineOr(m_Zero(), m_Undef()))) {
-            // Don't insert a call to llvm.trap right before the unreachable.
-            changeToUnreachable(II, false, false, DDT);
+      if (II->getIntrinsicID() == Intrinsic::experimental_guard) {
+        // A call to the guard intrinsic bails out of the current compilation
+        // unit if the predicate passed to it is false. If the predicate is a
+        // constant false, then we know the guard will bail out of the current
+        // compile unconditionally, so all code following it is dead.
+        //
+        // Note: unlike in llvm.assume, it is not "obviously profitable" for
+        // guards to treat `undef` as `false` since a guard on `undef` can
+        // still be useful for widening.
+        if (match(II->getArgOperand(0), m_Zero()))
+          if (!isa<UnreachableInst>(II->getNextNode())) {
+            changeToUnreachable(II->getNextNode(), /*UseLLVMTrap=*/false,
+                                false, DDT);
             Changed = true;
             break;
           }
-        }
-
-        if (II->getIntrinsicID() == Intrinsic::experimental_guard) {
-          // A call to the guard intrinsic bails out of the current compilation
-          // unit if the predicate passed to it is false. If the predicate is a
-          // constant false, then we know the guard will bail out of the current
-          // compile unconditionally, so all code following it is dead.
-          //
-          // Note: unlike in llvm.assume, it is not "obviously profitable" for
-          // guards to treat `undef` as `false` since a guard on `undef` can
-          // still be useful for widening.
-          if (match(II->getArgOperand(0), m_Zero()))
-            if (!isa<UnreachableInst>(II->getNextNode())) {
-              changeToUnreachable(II->getNextNode(), /*UseLLVMTrap=*/false,
-                                  false, DDT);
-              Changed = true;
-              break;
-            }
-        }
       }
+    }
 
-      if (auto *CI = dyn_cast<CallInst>(&I)) {
-        Value *Callee = CI->getCalledValue();
-        if (isa<ConstantPointerNull>(Callee) || isa<UndefValue>(Callee)) {
-          changeToUnreachable(CI, /*UseLLVMTrap=*/false, false, DDT);
+    if (auto *CI = dyn_cast<CallInst>(&I)) {
+      Value *Callee = CI->getCalledValue();
+      if (isa<ConstantPointerNull>(Callee) || isa<UndefValue>(Callee)) {
+        changeToUnreachable(CI, /*UseLLVMTrap=*/false, false, DDT);
+        Changed = true;
+        break;
+      }
+      if (CI->doesNotReturn()) {
+        // If we found a call to a no-return function, insert an unreachable
+        // instruction after it. Make sure there isn't *already* one there
+        // though.
+        if (!isa<UnreachableInst>(CI->getNextNode())) {
+          // Don't insert a call to llvm.trap right before the unreachable.
+          changeToUnreachable(CI->getNextNode(), false, false, DDT);
          Changed = true;
-          break;
-        }
-        if (CI->doesNotReturn()) {
-          // If we found a call to a no-return function, insert an unreachable
-          // instruction after it. Make sure there isn't *already* one there
-          // though.
-          if (!isa<UnreachableInst>(CI->getNextNode())) {
-            // Don't insert a call to llvm.trap right before the unreachable.
-            changeToUnreachable(CI->getNextNode(), false, false, DDT);
-            Changed = true;
-          }
-          break;
        }
+        break;
      }
+    }
 
-      // Store to undef and store to null are undefined and used to signal that
-      // they should be changed to unreachable by passes that can't modify the
-      // CFG.
-      if (auto *SI = dyn_cast<StoreInst>(&I)) {
-        // Don't touch volatile stores.
-        if (SI->isVolatile()) continue;
+    // Store to undef and store to null are undefined and used to signal that
+    // they should be changed to unreachable by passes that can't modify the
+    // CFG.
+    if (auto *SI = dyn_cast<StoreInst>(&I)) {
+      // Don't touch volatile stores.
+      if (SI->isVolatile()) continue;
 
-        Value *Ptr = SI->getOperand(1);
+      Value *Ptr = SI->getOperand(1);
 
-        if (isa<UndefValue>(Ptr) ||
-            (isa<ConstantPointerNull>(Ptr) &&
-             SI->getPointerAddressSpace() == 0)) {
-          changeToUnreachable(SI, true, false, DDT);
-          Changed = true;
-          break;
-        }
+      if (isa<UndefValue>(Ptr) ||
+          (isa<ConstantPointerNull>(Ptr) &&
+           SI->getPointerAddressSpace() == 0)) {
+        changeToUnreachable(SI, true, false, DDT);
+        Changed = true;
+        break;
      }
    }
+  }
 
-    TerminatorInst *Terminator = BB->getTerminator();
-    if (auto *II = dyn_cast<InvokeInst>(Terminator)) {
-      // Turn invokes that call 'nounwind' functions into ordinary calls.
-      Value *Callee = II->getCalledValue();
-      if (isa<ConstantPointerNull>(Callee) || isa<UndefValue>(Callee)) {
-        changeToUnreachable(II, true, false, DDT);
-        Changed = true;
-      } else if (II->doesNotThrow() && canSimplifyInvokeNoUnwind(&F)) {
-        if (II->use_empty() && II->onlyReadsMemory()) {
-          // jump to the normal destination branch.
-          BasicBlock *NormalDestBB = II->getNormalDest();
-          BasicBlock *UnwindDestBB = II->getUnwindDest();
-          BranchInst::Create(NormalDestBB, II);
-          UnwindDestBB->removePredecessor(II->getParent());
-          II->eraseFromParent();
-          if (DDT)
-            DDT->deleteEdge(BB, UnwindDestBB);
-        } else
-          changeToCall(II, DDT);
-        Changed = true;
+  TerminatorInst *Terminator = BB->getTerminator();
+  if (auto *II = dyn_cast<InvokeInst>(Terminator)) {
+    // Turn invokes that call 'nounwind' functions into ordinary calls.
+    Value *Callee = II->getCalledValue();
+    if (isa<ConstantPointerNull>(Callee) || isa<UndefValue>(Callee)) {
+      changeToUnreachable(II, true, false, DDT);
+      Changed = true;
+    } else if (II->doesNotThrow() && canSimplifyInvokeNoUnwind(&F)) {
+      if (II->use_empty() && II->onlyReadsMemory()) {
+        // jump to the normal destination branch.
+        BasicBlock *NormalDestBB = II->getNormalDest();
+        BasicBlock *UnwindDestBB = II->getUnwindDest();
+        BranchInst::Create(NormalDestBB, II);
+        UnwindDestBB->removePredecessor(II->getParent());
+        II->eraseFromParent();
+        if (DDT)
+          DDT->deleteEdge(BB, UnwindDestBB);
+      } else
+        changeToCall(II, DDT);
+      Changed = true;
+    }
+  } else if (auto *CatchSwitch = dyn_cast<CatchSwitchInst>(Terminator)) {
+    // Remove catchpads which cannot be reached.
+    struct CatchPadDenseMapInfo {
+      static CatchPadInst *getEmptyKey() {
+        return DenseMapInfo<CatchPadInst *>::getEmptyKey();
      }
-    } else if (auto *CatchSwitch = dyn_cast<CatchSwitchInst>(Terminator)) {
-      // Remove catchpads which cannot be reached.
-      struct CatchPadDenseMapInfo {
-        static CatchPadInst *getEmptyKey() {
-          return DenseMapInfo<CatchPadInst *>::getEmptyKey();
-        }
-        static CatchPadInst *getTombstoneKey() {
-          return DenseMapInfo<CatchPadInst *>::getTombstoneKey();
-        }
+      static CatchPadInst *getTombstoneKey() {
+        return DenseMapInfo<CatchPadInst *>::getTombstoneKey();
+      }
 
-        static unsigned getHashValue(CatchPadInst *CatchPad) {
-          return static_cast<unsigned>(hash_combine_range(
-              CatchPad->value_op_begin(), CatchPad->value_op_end()));
-        }
+      static unsigned getHashValue(CatchPadInst *CatchPad) {
+        return static_cast<unsigned>(hash_combine_range(
+            CatchPad->value_op_begin(), CatchPad->value_op_end()));
+      }
 
-        static bool isEqual(CatchPadInst *LHS, CatchPadInst *RHS) {
-          if (LHS == getEmptyKey() || LHS == getTombstoneKey() ||
-              RHS == getEmptyKey() || RHS == getTombstoneKey())
-            return LHS == RHS;
-          return LHS->isIdenticalTo(RHS);
-        }
-      };
-
-      // Set of unique CatchPads.
-      SmallDenseMap<CatchPadInst *, detail::DenseSetEmpty, 4,
-                    CatchPadDenseMapInfo>
-          HandlerSet;
-      detail::DenseSetEmpty Empty;
-      for (CatchSwitchInst::handler_iterator I = CatchSwitch->handler_begin(),
-                                             E = CatchSwitch->handler_end();
-           I != E; ++I) {
-        BasicBlock *HandlerBB = *I;
-        auto *CatchPad = cast<CatchPadInst>(HandlerBB->getFirstNonPHI());
-        if (!HandlerSet.insert({CatchPad, Empty}).second) {
-          CatchSwitch->removeHandler(I);
-          --I;
-          --E;
-          Changed = true;
-        }
+      static bool isEqual(CatchPadInst *LHS, CatchPadInst *RHS) {
+        if (LHS == getEmptyKey() || LHS == getTombstoneKey() ||
+            RHS == getEmptyKey() || RHS == getTombstoneKey())
+          return LHS == RHS;
+        return LHS->isIdenticalTo(RHS);
+      }
+    };
+
+    // Set of unique CatchPads.
+    SmallDenseMap<CatchPadInst *, detail::DenseSetEmpty, 4,
+                  CatchPadDenseMapInfo>
+        HandlerSet;
+    detail::DenseSetEmpty Empty;
+    for (CatchSwitchInst::handler_iterator I = CatchSwitch->handler_begin(),
+                                           E = CatchSwitch->handler_end();
+         I != E; ++I) {
+      BasicBlock *HandlerBB = *I;
+      auto *CatchPad = cast<CatchPadInst>(HandlerBB->getFirstNonPHI());
+      if (!HandlerSet.insert({CatchPad, Empty}).second) {
+        CatchSwitch->removeHandler(I);
+        --I;
+        --E;
+        Changed = true;
      }
    }
+  }
+
+  Changed |= ConstantFoldTerminator(BB, true, nullptr, DDT);
+
+  return Changed;
+}
+
+static bool markAliveBlocks(Function &F,
+                            SmallPtrSetImpl<BasicBlock*> &Reachable,
+                            bool ExamineInstructions,
+                            DeferredDominance *DDT = nullptr) {
+  SmallVector<BasicBlock*, 128> Worklist;
+  BasicBlock *BB = &F.front();
+  Worklist.push_back(BB);
+  Reachable.insert(BB);
+  bool Changed = false;
+  do {
+    BB = Worklist.pop_back_val();
+
+    if (ExamineInstructions)
+      Changed |= rewriteUnreachableInstrs(F, BB, DDT);
 
-    Changed |= ConstantFoldTerminator(BB, true, nullptr, DDT);
     for (BasicBlock *Successor : successors(BB))
       if (Reachable.insert(Successor).second)
        Worklist.push_back(Successor);
@@ -1942,9 +1954,10 @@
 /// otherwise. If `LVI` is passed, this function preserves LazyValueInfo
 /// after modifying the CFG.
 bool llvm::removeUnreachableBlocks(Function &F, LazyValueInfo *LVI,
-                                   DeferredDominance *DDT) {
+                                   DeferredDominance *DDT,
+                                   bool ExamineInstructions) {
   SmallPtrSet<BasicBlock*, 16> Reachable;
-  bool Changed = markAliveBlocks(F, Reachable, DDT);
+  bool Changed = markAliveBlocks(F, Reachable, ExamineInstructions, DDT);
 
   // If there are unreachable blocks in the CFG...
   if (Reachable.size() == F.size())
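Usage sketch (not part of the patch; the helper names below are hypothetical, and only removeUnreachableBlocks, its parameters, and the llvm/Analysis/Utils/Local.h header come from the diff above). It shows how a caller could opt out of the instruction-rewriting step while existing callers keep the current behaviour through the default argument:

  // A minimal sketch, assuming this patch is applied.
  #include "llvm/Analysis/Utils/Local.h"
  #include "llvm/IR/Function.h"

  using namespace llvm;

  // CFG-only cleanup: drop blocks that the successor lists already prove
  // unreachable, without rewriting any reachable instructions.
  static bool pruneCFGOnly(Function &F) {
    return removeUnreachableBlocks(F, /*LVI=*/nullptr, /*DDT=*/nullptr,
                                   /*ExamineInstructions=*/false);
  }

  // Default behaviour: first rewrite provably unreachable instructions
  // (false assumes, calls through null, stores to null/undef, ...) into
  // `unreachable`, which can expose additional dead blocks, then prune.
  static bool pruneAndRewrite(Function &F) {
    return removeUnreachableBlocks(F); // ExamineInstructions defaults to true.
  }

Defaulting ExamineInstructions to true keeps the behaviour of all existing callers unchanged.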