diff --git a/llvm/lib/Transforms/InstCombine/InstCombineCalls.cpp b/llvm/lib/Transforms/InstCombine/InstCombineCalls.cpp
--- a/llvm/lib/Transforms/InstCombine/InstCombineCalls.cpp
+++ b/llvm/lib/Transforms/InstCombine/InstCombineCalls.cpp
@@ -2270,10 +2270,12 @@
         !CalleeF->isDeclaration()) {
       Instruction *OldCall = &Call;
       CreateNonTerminatorUnreachable(OldCall);
-      // If OldCall does not return void then replaceInstUsesWith undef.
+      // If OldCall does not return void then replaceInstUsesWith poison.
       // This allows ValueHandlers and custom metadata to adjust itself.
       if (!OldCall->getType()->isVoidTy())
-        replaceInstUsesWith(*OldCall, UndefValue::get(OldCall->getType()));
+        // Use poison instead of undef because poison can be aggressively
+        // folded.
+        replaceInstUsesWith(*OldCall, PoisonValue::get(OldCall->getType()));
       if (isa<CallInst>(OldCall))
         return eraseInstFromFunction(*OldCall);
 
@@ -2286,13 +2288,16 @@
     }
   }
 
+  // Calling a null function pointer is undefined if a null address isn't
+  // dereferenceable.
   if ((isa<ConstantPointerNull>(Callee) &&
        !NullPointerIsDefined(Call.getFunction())) ||
       isa<UndefValue>(Callee)) {
-    // If Call does not return void then replaceInstUsesWith undef.
+    // If Call does not return void then replaceInstUsesWith poison.
+    // Use poison value instead of undef because it can be aggressively folded.
     // This allows ValueHandlers and custom metadata to adjust itself.
     if (!Call.getType()->isVoidTy())
-      replaceInstUsesWith(Call, UndefValue::get(Call.getType()));
+      replaceInstUsesWith(Call, PoisonValue::get(Call.getType()));
 
     if (Call.isTerminator()) {
       // Can't remove an invoke or callbr because we cannot change the CFG.
diff --git a/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp b/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
--- a/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
+++ b/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
@@ -2548,7 +2548,8 @@
         NewV = combineLoadToNewType(*LI, DestTy);
         // Remove the old load and its use in the old phi, which itself becomes
         // dead once the whole transform finishes.
-        replaceInstUsesWith(*LI, UndefValue::get(LI->getType()));
+        // Use poison instead of undef because it can be aggressively folded.
+        replaceInstUsesWith(*LI, PoisonValue::get(LI->getType()));
         eraseInstFromFunction(*LI);
       } else if (auto *BCI = dyn_cast<BitCastInst>(V)) {
         NewV = BCI->getOperand(0);
diff --git a/llvm/lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp b/llvm/lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp
--- a/llvm/lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp
+++ b/llvm/lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp
@@ -981,10 +981,12 @@
     // that this code is not reachable. We do this instead of inserting
     // an unreachable instruction directly because we cannot modify the
     // CFG.
-    StoreInst *SI = new StoreInst(UndefValue::get(LI.getType()),
+    // Use poison value instead of undef because using poison allows more
+    // aggressive folding.
+    StoreInst *SI = new StoreInst(PoisonValue::get(LI.getType()),
                                   Constant::getNullValue(Op->getType()), &LI);
     SI->setDebugLoc(LI.getDebugLoc());
-    return replaceInstUsesWith(LI, UndefValue::get(LI.getType()));
+    return replaceInstUsesWith(LI, PoisonValue::get(LI.getType()));
   }
 
   if (Op->hasOneUse()) {
@@ -1332,7 +1334,8 @@
     IC.Builder.SetInsertPoint(USI);
     combineStoreToNewValue(IC, *USI, NewLI);
   }
-  IC.replaceInstUsesWith(*LI, UndefValue::get(LI->getType()));
+  // Use poison instead of undef because it can be aggressively folded.
+  IC.replaceInstUsesWith(*LI, PoisonValue::get(LI->getType()));
   IC.eraseInstFromFunction(*LI);
   return true;
 }
@@ -1440,7 +1443,7 @@
   // store X, GEP(null, Y) -> turns into 'unreachable' in SimplifyCFG
   if (canSimplifyNullStoreOrGEP(SI)) {
     if (!isa<UndefValue>(Val))
-      return replaceOperand(SI, 0, UndefValue::get(Val->getType()));
+      return replaceOperand(SI, 0, PoisonValue::get(Val->getType()));
     return nullptr; // Do not modify these!
   }
 
diff --git a/llvm/lib/Transforms/InstCombine/InstCombinePHI.cpp b/llvm/lib/Transforms/InstCombine/InstCombinePHI.cpp
--- a/llvm/lib/Transforms/InstCombine/InstCombinePHI.cpp
+++ b/llvm/lib/Transforms/InstCombine/InstCombinePHI.cpp
@@ -1119,7 +1119,8 @@
 
   // If we have no users, they must be all self uses, just nuke the PHI.
   if (PHIUsers.empty())
-    return replaceInstUsesWith(FirstPhi, UndefValue::get(FirstPhi.getType()));
+    // Use poison instead of undef because it can be aggressively folded.
+    return replaceInstUsesWith(FirstPhi, PoisonValue::get(FirstPhi.getType()));
 
   // If this phi node is transformable, create new PHIs for all the pieces
   // extracted out of it. First, sort the users by their offset and size.
@@ -1218,11 +1219,12 @@
   }
 
   // Replace all the remaining uses of the PHI nodes (self uses and the lshrs)
-  // with undefs.
-  Value *Undef = UndefValue::get(FirstPhi.getType());
+  // with poisons.
+  // Use poison instead of undef because it can be aggressively folded.
+  Value *Poison = PoisonValue::get(FirstPhi.getType());
   for (unsigned i = 1, e = PHIsToSlice.size(); i != e; ++i)
-    replaceInstUsesWith(*PHIsToSlice[i], Undef);
-  return replaceInstUsesWith(FirstPhi, Undef);
+    replaceInstUsesWith(*PHIsToSlice[i], Poison);
+  return replaceInstUsesWith(FirstPhi, Poison);
 }
 
 static Value *SimplifyUsingControlFlow(InstCombiner &Self, PHINode &PN,
@@ -1346,7 +1348,8 @@
       SmallPtrSet<PHINode *, 16> PotentiallyDeadPHIs;
       PotentiallyDeadPHIs.insert(&PN);
       if (DeadPHICycle(PU, PotentiallyDeadPHIs))
-        return replaceInstUsesWith(PN, UndefValue::get(PN.getType()));
+        // Use poison instead of undef because it can be aggressively folded.
+        return replaceInstUsesWith(PN, PoisonValue::get(PN.getType()));
     }
 
     // If this phi has a single use, and if that use just computes a value for
@@ -1358,7 +1361,7 @@
     if (PHIUser->hasOneUse() &&
         (isa<BinaryOperator>(PHIUser) || isa<GetElementPtrInst>(PHIUser)) &&
         PHIUser->user_back() == &PN) {
-      return replaceInstUsesWith(PN, UndefValue::get(PN.getType()));
+      return replaceInstUsesWith(PN, PoisonValue::get(PN.getType()));
     }
     // When a PHI is used only to be compared with zero, it is safe to replace
     // an incoming value proved as known nonzero with any non-zero constant.
diff --git a/llvm/lib/Transforms/InstCombine/InstructionCombining.cpp b/llvm/lib/Transforms/InstCombine/InstructionCombining.cpp
--- a/llvm/lib/Transforms/InstCombine/InstructionCombining.cpp
+++ b/llvm/lib/Transforms/InstCombine/InstructionCombining.cpp
@@ -2665,7 +2665,9 @@
       } else {
         // Casts, GEP, or anything else: we're about to delete this instruction,
         // so it can not have any valid uses.
-        replaceInstUsesWith(*I, UndefValue::get(I->getType()));
+        // Use poison value instead of undef because using poison allows more
+        // aggressive folding.
+        replaceInstUsesWith(*I, PoisonValue::get(I->getType()));
       }
       eraseInstFromFunction(*I);
     }
@@ -2870,8 +2872,8 @@
         return nullptr;
 
     // A value may still have uses before we process it here (for example, in
-    // another unreachable block), so convert those to undef.
-    replaceInstUsesWith(*Prev, UndefValue::get(Prev->getType()));
+    // another unreachable block), so convert those to poison.
+    replaceInstUsesWith(*Prev, PoisonValue::get(Prev->getType()));
     eraseInstFromFunction(*Prev);
     return &I;
   }
@@ -3064,7 +3066,9 @@
       if (*EV.idx_begin() == 0) {
         Instruction::BinaryOps BinOp = WO->getBinaryOp();
         Value *LHS = WO->getLHS(), *RHS = WO->getRHS();
-        replaceInstUsesWith(*WO, UndefValue::get(WO->getType()));
+        // Replace the old instruction's uses with poison.
+        // Use poison instead of undef because it can be aggressively folded.
+        replaceInstUsesWith(*WO, PoisonValue::get(WO->getType()));
         eraseInstFromFunction(*WO);
         return BinaryOperator::Create(BinOp, LHS, RHS);
       }
diff --git a/llvm/test/Transforms/GVN/PRE/2017-06-28-pre-load-dbgloc.ll b/llvm/test/Transforms/GVN/PRE/2017-06-28-pre-load-dbgloc.ll
--- a/llvm/test/Transforms/GVN/PRE/2017-06-28-pre-load-dbgloc.ll
+++ b/llvm/test/Transforms/GVN/PRE/2017-06-28-pre-load-dbgloc.ll
@@ -38,7 +38,7 @@
 ; ALL: br i1 %tobool, label %entry.cond.end_crit_edge, label %cond.false, !dbg [[LOC_15_6:![0-9]+]]
 ; ALL: entry.cond.end_crit_edge:
 ; GVN: %.pre = load %struct.node*, %struct.node** null, align 8, !dbg [[LOC_16_13:![0-9]+]]
-; INSTCOMBINE:store %struct.node* undef, %struct.node** null, align 536870912, !dbg [[LOC_16_13:![0-9]+]]
+; INSTCOMBINE:store %struct.node* poison, %struct.node** null, align 536870912, !dbg [[LOC_16_13:![0-9]+]]
 
 cond.false:
   %0 = bitcast %struct.desc* %desc to i8***, !dbg !11
diff --git a/llvm/test/Transforms/InstCombine/atomic.ll b/llvm/test/Transforms/InstCombine/atomic.ll
--- a/llvm/test/Transforms/InstCombine/atomic.ll
+++ b/llvm/test/Transforms/InstCombine/atomic.ll
@@ -119,8 +119,8 @@
 ; ordering imposed.
 define i32 @test9() {
 ; CHECK-LABEL: @test9(
-; CHECK-NEXT: store i32 undef, i32* null, align 536870912
-; CHECK-NEXT: ret i32 undef
+; CHECK-NEXT: store i32 poison, i32* null, align 536870912
+; CHECK-NEXT: ret i32 poison
 ;
   %x = load atomic i32, i32* null unordered, align 4
   ret i32 %x
@@ -177,7 +177,7 @@
 ; ordering imposed.
 define i32 @test12() {
 ; CHECK-LABEL: @test12(
-; CHECK-NEXT: store atomic i32 undef, i32* null unordered, align 536870912
+; CHECK-NEXT: store atomic i32 poison, i32* null unordered, align 536870912
 ; CHECK-NEXT: ret i32 0
 ;
   store atomic i32 0, i32* null unordered, align 4
diff --git a/llvm/test/Transforms/InstCombine/builtin-object-size-ptr.ll b/llvm/test/Transforms/InstCombine/builtin-object-size-ptr.ll
--- a/llvm/test/Transforms/InstCombine/builtin-object-size-ptr.ll
+++ b/llvm/test/Transforms/InstCombine/builtin-object-size-ptr.ll
@@ -60,7 +60,7 @@
 
 define {}* @minimal_invariant_start_use(i8 %x) {
 ; CHECK-LABEL: @minimal_invariant_start_use(
-; CHECK-NEXT: ret {}* undef
+; CHECK-NEXT: ret {}* poison
 ;
   %a = alloca i8
   %i = call {}* @llvm.invariant.start.p0i8(i64 1, i8* %a)
diff --git a/llvm/test/Transforms/InstCombine/getelementptr.ll b/llvm/test/Transforms/InstCombine/getelementptr.ll
--- a/llvm/test/Transforms/InstCombine/getelementptr.ll
+++ b/llvm/test/Transforms/InstCombine/getelementptr.ll
@@ -495,7 +495,7 @@
 define void @test25() {
 ; CHECK-LABEL: @test25(
 ; CHECK-NEXT: entry:
-; CHECK-NEXT: store i64 undef, i64* null, align 536870912
+; CHECK-NEXT: store i64 poison, i64* null, align 536870912
 ; CHECK-NEXT: tail call void @foo25(i32 0, i64 0)
 ; CHECK-NEXT: unreachable
 ;
@@ -613,12 +613,8 @@
 define i32 @test29(i8* %start, i32 %X) nounwind {
 ; CHECK-LABEL: @test29(
 ; CHECK-NEXT: entry:
-; CHECK-NEXT: store i64 undef, i64* null, align 536870912
-; CHECK-NEXT: [[ADD_PTR:%.*]] = getelementptr i8, i8* [[START:%.*]], i64 undef
-; CHECK-NEXT: [[TMP0:%.*]] = sext i32 [[X:%.*]] to i64
-; CHECK-NEXT: [[ADD_PTR212:%.*]] = getelementptr i8, i8* [[START]], i64 [[TMP0]]
-; CHECK-NEXT: [[CMP214:%.*]] = icmp ugt i8* [[ADD_PTR212]], [[ADD_PTR]]
-; CHECK-NEXT: br i1 [[CMP214]], label [[IF_THEN216:%.*]], label [[IF_END363:%.*]]
+; CHECK-NEXT: store i64 poison, i64* null, align 536870912
+; CHECK-NEXT: br i1 false, label [[IF_THEN216:%.*]], label [[IF_END363:%.*]]
 ; CHECK: if.then216:
 ; CHECK-NEXT: ret i32 1
 ; CHECK: if.end363:
@@ -1232,7 +1228,7 @@
 
 define i32* @PR45084(i1 %cond) {
 ; CHECK-LABEL: @PR45084(
-; CHECK-NEXT: [[GEP:%.*]] = select i1 [[COND:%.*]], i32* getelementptr inbounds ([[STRUCT_F:%.*]], %struct.f* @g0, i64 0, i32 0), i32* getelementptr inbounds ([[STRUCT_F]], %struct.f* @g1, i64 0, i32 0), !prof !0
+; CHECK-NEXT: [[GEP:%.*]] = select i1 [[COND:%.*]], i32* getelementptr inbounds ([[STRUCT_F:%.*]], %struct.f* @g0, i64 0, i32 0), i32* getelementptr inbounds ([[STRUCT_F]], %struct.f* @g1, i64 0, i32 0), !prof [[PROF0:![0-9]+]]
 ; CHECK-NEXT: ret i32* [[GEP]]
 ;
   %sel = select i1 %cond, %struct.f* @g0, %struct.f* @g1, !prof !0
diff --git a/llvm/test/Transforms/InstCombine/load.ll b/llvm/test/Transforms/InstCombine/load.ll
--- a/llvm/test/Transforms/InstCombine/load.ll
+++ b/llvm/test/Transforms/InstCombine/load.ll
@@ -59,8 +59,8 @@
 
 define i32 @load_gep_null_inbounds(i64 %X) {
 ; CHECK-LABEL: @load_gep_null_inbounds(
-; CHECK-NEXT: store i32 undef, i32* null, align 536870912
-; CHECK-NEXT: ret i32 undef
+; CHECK-NEXT: store i32 poison, i32* null, align 536870912
+; CHECK-NEXT: ret i32 poison
 ;
   %V = getelementptr inbounds i32, i32* null, i64 %X
   %R = load i32, i32* %V
@@ -69,8 +69,8 @@
 
 define i32 @load_gep_null_not_inbounds(i64 %X) {
 ; CHECK-LABEL: @load_gep_null_not_inbounds(
-; CHECK-NEXT: store i32 undef, i32* null, align 536870912
-; CHECK-NEXT: ret i32 undef
+; CHECK-NEXT: store i32 poison, i32* null, align 536870912
+; CHECK-NEXT: ret i32 poison
 ;
   %V = getelementptr i32, i32* null, i64 %X
   %R = load i32, i32* %V
diff --git a/llvm/test/Transforms/InstCombine/pr44245.ll b/llvm/test/Transforms/InstCombine/pr44245.ll
--- a/llvm/test/Transforms/InstCombine/pr44245.ll
+++ b/llvm/test/Transforms/InstCombine/pr44245.ll
@@ -159,7 +159,7 @@
 ; CHECK: cond.true133:
 ; CHECK-NEXT: br label [[COND_END144:%.*]]
 ; CHECK: cond.false138:
-; CHECK-NEXT: store %type_2* undef, %type_2** null, align 536870912
+; CHECK-NEXT: store %type_2* poison, %type_2** null, align 536870912
 ; CHECK-NEXT: br label [[COND_END144]]
 ; CHECK: cond.end144:
 ; CHECK-NEXT: br label [[WHILE_COND]]
diff --git a/llvm/test/Transforms/InstCombine/store.ll b/llvm/test/Transforms/InstCombine/store.ll
--- a/llvm/test/Transforms/InstCombine/store.ll
+++ b/llvm/test/Transforms/InstCombine/store.ll
@@ -4,7 +4,7 @@
 define void @test1(i32* %P) {
 ; CHECK-LABEL: @test1(
 ; CHECK-NEXT: store i32 123, i32* undef, align 4
-; CHECK-NEXT: store i32 undef, i32* null, align 536870912
+; CHECK-NEXT: store i32 poison, i32* null, align 536870912
 ; CHECK-NEXT: ret void
 ;
   store i32 undef, i32* %P
@@ -26,7 +26,7 @@
 define void @store_at_gep_off_null_inbounds(i64 %offset) {
 ; CHECK-LABEL: @store_at_gep_off_null_inbounds(
 ; CHECK-NEXT: [[PTR:%.*]] = getelementptr inbounds i32, i32* null, i64 [[OFFSET:%.*]]
-; CHECK-NEXT: store i32 undef, i32* [[PTR]], align 4
+; CHECK-NEXT: store i32 poison, i32* [[PTR]], align 4
 ; CHECK-NEXT: ret void
 ;
   %ptr = getelementptr inbounds i32, i32 *null, i64 %offset
@@ -37,7 +37,7 @@
 define void @store_at_gep_off_null_not_inbounds(i64 %offset) {
 ; CHECK-LABEL: @store_at_gep_off_null_not_inbounds(
 ; CHECK-NEXT: [[PTR:%.*]] = getelementptr i32, i32* null, i64 [[OFFSET:%.*]]
-; CHECK-NEXT: store i32 undef, i32* [[PTR]], align 4
+; CHECK-NEXT: store i32 poison, i32* [[PTR]], align 4
 ; CHECK-NEXT: ret void
 ;
   %ptr = getelementptr i32, i32 *null, i64 %offset
@@ -141,14 +141,14 @@
 ; CHECK-NEXT: br label [[FOR_COND:%.*]]
 ; CHECK: for.cond:
 ; CHECK-NEXT: [[STOREMERGE:%.*]] = phi i32 [ 42, [[ENTRY:%.*]] ], [ [[INC:%.*]], [[FOR_BODY:%.*]] ]
-; CHECK-NEXT: store i32 [[STOREMERGE]], i32* [[GI:%.*]], align 4, [[TBAA0:!tbaa !.*]]
+; CHECK-NEXT: store i32 [[STOREMERGE]], i32* [[GI:%.*]], align 4, !tbaa [[TBAA0:![0-9]+]]
 ; CHECK-NEXT: [[CMP:%.*]] = icmp slt i32 [[STOREMERGE]], [[N:%.*]]
 ; CHECK-NEXT: br i1 [[CMP]], label [[FOR_BODY]], label [[FOR_END:%.*]]
 ; CHECK: for.body:
 ; CHECK-NEXT: [[IDXPROM:%.*]] = sext i32 [[STOREMERGE]] to i64
 ; CHECK-NEXT: [[ARRAYIDX:%.*]] = getelementptr inbounds float, float* [[A:%.*]], i64 [[IDXPROM]]
-; CHECK-NEXT: store float 0.000000e+00, float* [[ARRAYIDX]], align 4, [[TBAA4:!tbaa !.*]]
-; CHECK-NEXT: [[TMP0:%.*]] = load i32, i32* [[GI]], align 4, [[TBAA0]]
+; CHECK-NEXT: store float 0.000000e+00, float* [[ARRAYIDX]], align 4, !tbaa [[TBAA4:![0-9]+]]
+; CHECK-NEXT: [[TMP0:%.*]] = load i32, i32* [[GI]], align 4, !tbaa [[TBAA0]]
 ; CHECK-NEXT: [[INC]] = add nsw i32 [[TMP0]], 1
 ; CHECK-NEXT: br label [[FOR_COND]]
 ; CHECK: for.end: