Index: llvm/trunk/lib/Transforms/InstCombine/InstCombineCalls.cpp
===================================================================
--- llvm/trunk/lib/Transforms/InstCombine/InstCombineCalls.cpp
+++ llvm/trunk/lib/Transforms/InstCombine/InstCombineCalls.cpp
@@ -1908,8 +1908,7 @@
     if (Changed) return II;
   }
 
-  // For vector result intrinsics, use the generic demanded vector support to
-  // simplify any operands before moving on to the per-intrinsic rules.
+  // For vector result intrinsics, use the generic demanded vector support.
   if (II->getType()->isVectorTy()) {
     auto VWidth = II->getType()->getVectorNumElements();
     APInt UndefElts(VWidth, 0);
Index: llvm/trunk/lib/Transforms/InstCombine/InstCombineSimplifyDemanded.cpp
===================================================================
--- llvm/trunk/lib/Transforms/InstCombine/InstCombineSimplifyDemanded.cpp
+++ llvm/trunk/lib/Transforms/InstCombine/InstCombineSimplifyDemanded.cpp
@@ -1175,9 +1175,18 @@
     // wouldn't have a vector result to get here. Note that we intentionally
     // merge the undef bits here since gepping with either an undef base or
     // index results in undef.
-    for (unsigned i = 0; i < I->getNumOperands(); i++)
-      if (I->getOperand(i)->getType()->isVectorTy())
-        simplifyAndSetOp(I, i, DemandedElts, UndefElts);
+    for (unsigned i = 0; i < I->getNumOperands(); i++) {
+      if (isa<UndefValue>(I->getOperand(i))) {
+        // If the entire vector is undefined, just return this info.
+        UndefElts = EltMask;
+        return nullptr;
+      }
+      if (I->getOperand(i)->getType()->isVectorTy()) {
+        APInt UndefEltsOp(VWidth, 0);
+        simplifyAndSetOp(I, i, DemandedElts, UndefEltsOp);
+        UndefElts |= UndefEltsOp;
+      }
+    }
 
     break;
   }
@@ -1663,5 +1672,10 @@
     UndefElts &= UndefElts2;
   }
 
+  // If we've proven all of the lanes undef, return an undef value.
+  // TODO: Intersect w/demanded lanes
+  if (UndefElts.isAllOnesValue())
+    return UndefValue::get(I->getType());
+
   return MadeChange ? I : nullptr;
 }
Index: llvm/trunk/lib/Transforms/InstCombine/InstructionCombining.cpp
===================================================================
--- llvm/trunk/lib/Transforms/InstCombine/InstructionCombining.cpp
+++ llvm/trunk/lib/Transforms/InstCombine/InstructionCombining.cpp
@@ -1557,6 +1557,19 @@
   if (Value *V = SimplifyGEPInst(GEPEltType, Ops, SQ.getWithInstruction(&GEP)))
     return replaceInstUsesWith(GEP, V);
 
+  // For vector geps, use the generic demanded vector support.
+  if (GEP.getType()->isVectorTy()) {
+    auto VWidth = GEP.getType()->getVectorNumElements();
+    APInt UndefElts(VWidth, 0);
+    APInt AllOnesEltMask(APInt::getAllOnesValue(VWidth));
+    if (Value *V = SimplifyDemandedVectorElts(&GEP, AllOnesEltMask,
+                                              UndefElts)) {
+      if (V != &GEP)
+        return replaceInstUsesWith(GEP, V);
+      return &GEP;
+    }
+  }
+
   Value *PtrOp = GEP.getOperand(0);
 
   // Eliminate unneeded casts for indices, and replace indices which displace
Index: llvm/trunk/test/Transforms/InstCombine/vec_demanded_elts.ll
===================================================================
--- llvm/trunk/test/Transforms/InstCombine/vec_demanded_elts.ll
+++ llvm/trunk/test/Transforms/InstCombine/vec_demanded_elts.ll
@@ -620,10 +620,7 @@
 
 define <2 x i32*> @gep_all_lanes_undef(i32* %base, i64 %idx) {;
 ; CHECK-LABEL: @gep_all_lanes_undef(
-; CHECK-NEXT:    [[BASEVEC:%.*]] = insertelement <2 x i32*> undef, i32* [[BASE:%.*]], i32 0
-; CHECK-NEXT:    [[IDXVEC:%.*]] = insertelement <2 x i64> undef, i64 [[IDX:%.*]], i32 1
-; CHECK-NEXT:    [[GEP:%.*]] = getelementptr i32, <2 x i32*> [[BASEVEC]], <2 x i64> [[IDXVEC]]
-; CHECK-NEXT:    ret <2 x i32*> [[GEP]]
+; CHECK-NEXT:    ret <2 x i32*> undef
 ;
   %basevec = insertelement <2 x i32*> undef, i32* %base, i32 0
   %idxvec = insertelement <2 x i64> undef, i64 %idx, i32 1
@@ -641,4 +638,3 @@
   %ee = extractelement <2 x i32*> %gep, i32 1
   ret i32* %ee
 }
-
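Note (not part of the patch): a minimal standalone reproducer for the new fold, mirroring the @gep_all_lanes_undef test above; the function name @demo is illustrative. Run through opt -instcombine -S, the vector gep should now fold to undef, since every lane has an undef base or an undef index:

; Lane 0 of the gep has an undef index and lane 1 has an undef base, so the
; demanded-elements logic above proves every lane undef and the whole result
; is replaced with undef.
define <2 x i32*> @demo(i32* %base, i64 %idx) {
  %basevec = insertelement <2 x i32*> undef, i32* %base, i32 0
  %idxvec = insertelement <2 x i64> undef, i64 %idx, i32 1
  %gep = getelementptr i32, <2 x i32*> %basevec, <2 x i64> %idxvec
  ret <2 x i32*> %gep
}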