Index: llvm/trunk/lib/Analysis/ConstantFolding.cpp
===================================================================
--- llvm/trunk/lib/Analysis/ConstantFolding.cpp
+++ llvm/trunk/lib/Analysis/ConstantFolding.cpp
@@ -674,7 +674,7 @@
 
 /// If array indices are not pointer-sized integers, explicitly cast them so
 /// that they aren't implicitly casted by the getelementptr.
-static Constant *CastGEPIndices(Type *SrcTy, ArrayRef<Constant *> Ops,
+static Constant *CastGEPIndices(Type *SrcElemTy, ArrayRef<Constant *> Ops,
                                 Type *ResultTy, const DataLayout &DL,
                                 const TargetLibraryInfo *TLI) {
   Type *IntPtrTy = DL.getIntPtrType(ResultTy);
@@ -683,9 +683,7 @@
   SmallVector<Constant *, 32> NewIdxs;
   for (unsigned i = 1, e = Ops.size(); i != e; ++i) {
     if ((i == 1 ||
-         !isa<StructType>(GetElementPtrInst::getIndexedType(
-             cast<PointerType>(Ops[0]->getType()->getScalarType())
-                 ->getElementType(),
+         !isa<StructType>(GetElementPtrInst::getIndexedType(SrcElemTy,
              Ops.slice(1, i - 1)))) &&
         Ops[i]->getType() != IntPtrTy) {
       Any = true;
@@ -701,7 +699,7 @@
   if (!Any)
     return nullptr;
 
-  Constant *C = ConstantExpr::getGetElementPtr(SrcTy, Ops[0], NewIdxs);
+  Constant *C = ConstantExpr::getGetElementPtr(SrcElemTy, Ops[0], NewIdxs);
   if (ConstantExpr *CE = dyn_cast<ConstantExpr>(C)) {
     if (Constant *Folded = ConstantFoldConstantExpression(CE, DL, TLI))
       C = Folded;
@@ -711,32 +709,41 @@
 }
 
 /// Strip the pointer casts, but preserve the address space information.
-static Constant* StripPtrCastKeepAS(Constant* Ptr) {
+static Constant* StripPtrCastKeepAS(Constant* Ptr, Type *&ElemTy) {
   assert(Ptr->getType()->isPointerTy() && "Not a pointer type");
   PointerType *OldPtrTy = cast<PointerType>(Ptr->getType());
   Ptr = Ptr->stripPointerCasts();
   PointerType *NewPtrTy = cast<PointerType>(Ptr->getType());
 
+  ElemTy = NewPtrTy->getPointerElementType();
+
   // Preserve the address space number of the pointer.
   if (NewPtrTy->getAddressSpace() != OldPtrTy->getAddressSpace()) {
-    NewPtrTy = NewPtrTy->getElementType()->getPointerTo(
-        OldPtrTy->getAddressSpace());
+    NewPtrTy = ElemTy->getPointerTo(OldPtrTy->getAddressSpace());
     Ptr = ConstantExpr::getPointerCast(Ptr, NewPtrTy);
   }
   return Ptr;
 }
 
 /// If we can symbolically evaluate the GEP constant expression, do so.
-static Constant *SymbolicallyEvaluateGEP(Type *SrcTy, ArrayRef<Constant *> Ops,
-                                         Type *ResultTy, const DataLayout &DL,
+static Constant *SymbolicallyEvaluateGEP(const GEPOperator *GEP,
+                                         ArrayRef<Constant *> Ops,
+                                         const DataLayout &DL,
                                          const TargetLibraryInfo *TLI) {
+  Type *SrcElemTy = GEP->getSourceElementType();
+  Type *ResElemTy = GEP->getResultElementType();
+  Type *ResTy = GEP->getType();
+  if (!SrcElemTy->isSized())
+    return nullptr;
+
+  if (Constant *C = CastGEPIndices(SrcElemTy, Ops, ResTy, DL, TLI))
+    return C;
+
   Constant *Ptr = Ops[0];
-  if (!Ptr->getType()->getPointerElementType()->isSized() ||
-      !Ptr->getType()->isPointerTy())
+  if (!Ptr->getType()->isPointerTy())
     return nullptr;
 
   Type *IntPtrTy = DL.getIntPtrType(Ptr->getType());
-  Type *ResultElementTy = ResultTy->getPointerElementType();
 
   // If this is a constant expr gep that is effectively computing an
   // "offsetof", fold it into 'cast int Size to T*' instead of 'gep 0, 0, 12'
@@ -745,7 +752,7 @@
 
       // If this is "gep i8* Ptr, (sub 0, V)", fold this as:
       // "inttoptr (sub (ptrtoint Ptr), V)"
-      if (Ops.size() == 2 && ResultElementTy->isIntegerTy(8)) {
+      if (Ops.size() == 2 && ResElemTy->isIntegerTy(8)) {
        ConstantExpr *CE = dyn_cast<ConstantExpr>(Ops[1]);
        assert((!CE || CE->getType() == IntPtrTy) &&
               "CastGEPIndices didn't canonicalize index types!");
@@ -753,7 +760,7 @@
            CE->getOperand(0)->isNullValue()) {
          Constant *Res = ConstantExpr::getPtrToInt(Ptr, CE->getType());
          Res = ConstantExpr::getSub(Res, CE->getOperand(1));
-         Res = ConstantExpr::getIntToPtr(Res, ResultTy);
+         Res = ConstantExpr::getIntToPtr(Res, ResTy);
          if (ConstantExpr *ResCE = dyn_cast<ConstantExpr>(Res))
            Res = ConstantFoldConstantExpression(ResCE, DL, TLI);
          return Res;
@@ -768,7 +775,7 @@
             DL.getIndexedOffset(
                 Ptr->getType(),
                 makeArrayRef((Value * const *)Ops.data() + 1, Ops.size() - 1)));
-  Ptr = StripPtrCastKeepAS(Ptr);
+  Ptr = StripPtrCastKeepAS(Ptr, SrcElemTy);
 
   // If this is a GEP of a GEP, fold it all into a single GEP.
   while (GEPOperator *GEP = dyn_cast<GEPOperator>(Ptr)) {
@@ -786,7 +793,7 @@
 
     Ptr = cast<Constant>(GEP->getOperand(0));
     Offset += APInt(BitWidth, DL.getIndexedOffset(Ptr->getType(), NestedOps));
-    Ptr = StripPtrCastKeepAS(Ptr);
+    Ptr = StripPtrCastKeepAS(Ptr, SrcElemTy);
   }
 
   // If the base value for this address is a literal integer value, fold the
@@ -801,7 +808,7 @@
 
   if (Ptr->isNullValue() || BasePtr != 0) {
     Constant *C = ConstantInt::get(Ptr->getContext(), Offset + BasePtr);
-    return ConstantExpr::getIntToPtr(C, ResultTy);
+    return ConstantExpr::getIntToPtr(C, ResTy);
   }
 
   // Otherwise form a regular getelementptr. Recompute the indices so that
@@ -813,19 +820,26 @@
   SmallVector<Constant *, 32> NewIdxs;
 
   do {
-    if (SequentialType *ATy = dyn_cast<SequentialType>(Ty)) {
-      if (ATy->isPointerTy()) {
+    if (!Ty->isStructTy()) {
+      if (Ty->isPointerTy()) {
         // The only pointer indexing we'll do is on the first index of the GEP.
         if (!NewIdxs.empty())
           break;
 
+        Ty = SrcElemTy;
+
        // Only handle pointers to sized types, not pointers to functions.
-        if (!ATy->getElementType()->isSized())
+        if (!Ty->isSized())
          return nullptr;
+      } else if (auto *ATy = dyn_cast<SequentialType>(Ty)) {
+        Ty = ATy->getElementType();
+      } else {
+        // We've reached some non-indexable type.
+        break;
       }
 
      // Determine which element of the array the offset points into.
-      APInt ElemSize(BitWidth, DL.getTypeAllocSize(ATy->getElementType()));
+      APInt ElemSize(BitWidth, DL.getTypeAllocSize(Ty));
      if (ElemSize == 0)
        // The element size is 0. This may be [0 x Ty]*, so just use a zero
        // index for this level and proceed to the next level to see if it can
@@ -838,8 +852,8 @@
        Offset -= NewIdx * ElemSize;
        NewIdxs.push_back(ConstantInt::get(IntPtrTy, NewIdx));
      }
-      Ty = ATy->getElementType();
-    } else if (StructType *STy = dyn_cast<StructType>(Ty)) {
+    } else {
+      StructType *STy = cast<StructType>(Ty);
      // If we end up with an offset that isn't valid for this struct type, we
      // can't re-form this GEP in a regular form, so bail out. The pointer
      // operand likely went through casts that are necessary to make the GEP
@@ -856,11 +870,8 @@
                                         ElIdx));
      Offset -= APInt(BitWidth, SL.getElementOffset(ElIdx));
      Ty = STy->getTypeAtIndex(ElIdx);
-    } else {
-      // We've reached some non-indexable type.
-      break;
     }
-  } while (Ty != ResultElementTy);
+  } while (Ty != ResElemTy);
 
   // If we haven't used up the entire offset by descending the static
   // type, then the offset is pointing into the middle of an indivisible
@@ -869,14 +880,14 @@
     return nullptr;
 
   // Create a GEP.
-  Constant *C = ConstantExpr::getGetElementPtr(SrcTy, Ptr, NewIdxs);
+  Constant *C = ConstantExpr::getGetElementPtr(SrcElemTy, Ptr, NewIdxs);
   assert(C->getType()->getPointerElementType() == Ty &&
          "Computed GetElementPtr has unexpected type!");
 
   // If we ended up indexing a member with a type that doesn't match
   // the type of what the original indices indexed, add a cast.
-  if (Ty != ResultElementTy)
-    C = FoldBitCast(C, ResultTy, DL);
+  if (Ty != ResElemTy)
+    C = FoldBitCast(C, ResTy, DL);
 
   return C;
 }
@@ -891,10 +902,13 @@
 /// information, due to only being passed an opcode and operands. Constant
 /// folding using this function strips this information.
 ///
-static Constant *ConstantFoldInstOperandsImpl(unsigned Opcode, Type *DestTy,
+static Constant *ConstantFoldInstOperandsImpl(const Value *InstOrCE,
+                                              unsigned Opcode,
                                               ArrayRef<Constant *> Ops,
                                               const DataLayout &DL,
                                               const TargetLibraryInfo *TLI) {
+  Type *DestTy = InstOrCE->getType();
+
   // Handle easy binops first.
   if (Instruction::isBinaryOp(Opcode))
     return ConstantFoldBinaryOpOperands(Opcode, Ops[0], Ops[1], DL);
@@ -902,6 +916,14 @@
   if (Instruction::isCast(Opcode))
     return ConstantFoldCastOperand(Opcode, Ops[0], DestTy, DL);
 
+  if(auto *GEP = dyn_cast<GEPOperator>(InstOrCE)) {
+    if (Constant *C = SymbolicallyEvaluateGEP(GEP, Ops, DL, TLI))
+      return C;
+
+    return ConstantExpr::getGetElementPtr(GEP->getSourceElementType(),
+                                          Ops[0], Ops.slice(1));
+  }
+
   switch (Opcode) {
   default: return nullptr;
   case Instruction::ICmp:
@@ -919,15 +941,6 @@
     return ConstantExpr::getInsertElement(Ops[0], Ops[1], Ops[2]);
   case Instruction::ShuffleVector:
     return ConstantExpr::getShuffleVector(Ops[0], Ops[1], Ops[2]);
-  case Instruction::GetElementPtr: {
-    Type *SrcTy = nullptr;
-    if (Constant *C = CastGEPIndices(SrcTy, Ops, DestTy, DL, TLI))
-      return C;
-    if (Constant *C = SymbolicallyEvaluateGEP(SrcTy, Ops, DestTy, DL, TLI))
-      return C;
-
-    return ConstantExpr::getGetElementPtr(SrcTy, Ops[0], Ops.slice(1));
-  }
   }
 }
 
@@ -1009,7 +1022,7 @@
                                          EVI->getIndices());
   }
 
-  return ConstantFoldInstOperandsImpl(I->getOpcode(), I->getType(), Ops, DL, TLI);
+  return ConstantFoldInstOperands(I, Ops, DL, TLI);
 }
 
 static Constant *
@@ -1032,7 +1045,8 @@
   if (CE->isCompare())
     return ConstantFoldCompareInstOperands(CE->getPredicate(), Ops[0], Ops[1],
                                            DL, TLI);
-  return ConstantFoldInstOperandsImpl(CE->getOpcode(), CE->getType(), Ops, DL, TLI);
+
+  return ConstantFoldInstOperandsImpl(CE, CE->getOpcode(), Ops, DL, TLI);
 }
 
 /// Attempt to fold the constant expression
@@ -1049,8 +1063,7 @@
                                          ArrayRef<Constant *> Ops,
                                          const DataLayout &DL,
                                          const TargetLibraryInfo *TLI) {
-  return ConstantFoldInstOperandsImpl(I->getOpcode(), I->getType(), Ops, DL,
-                                      TLI);
+  return ConstantFoldInstOperandsImpl(I, I->getOpcode(), Ops, DL, TLI);
 }
 
 /// Attempt to constant fold a compare
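
Note (not part of the patch): a minimal sketch of the caller-facing entry point this change reroutes. The module setup, the global @g, and the helper name foldExampleGEP are hypothetical and exist only for illustration; the ConstantFoldConstantExpression call and the dispatch it now triggers through ConstantFoldInstOperandsImpl come from the patched code path.

// Illustrative sketch only -- not part of the patch. The global @g and the
// helper below are made up for demonstration.
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"

using namespace llvm;

static Constant *foldExampleGEP(Module &M) {
  LLVMContext &Ctx = M.getContext();

  // @g = global [4 x i32] zeroinitializer
  auto *ArrTy = ArrayType::get(Type::getInt32Ty(Ctx), 4);
  auto *G = new GlobalVariable(M, ArrTy, /*isConstant=*/true,
                               GlobalValue::ExternalLinkage,
                               ConstantAggregateZero::get(ArrTy), "g");

  // getelementptr ([4 x i32], [4 x i32]* @g, i64 0, i64 2) as a ConstantExpr.
  Constant *Idxs[] = {ConstantInt::get(Type::getInt64Ty(Ctx), 0),
                      ConstantInt::get(Type::getInt64Ty(Ctx), 2)};
  Constant *GEP = ConstantExpr::getGetElementPtr(ArrTy, G, Idxs);

  // ConstantFoldConstantExpression now reaches SymbolicallyEvaluateGEP via
  // ConstantFoldInstOperandsImpl, which dispatches on the GEPOperator and
  // reads the source element type from the operator itself rather than from
  // the pointee type of operand 0.
  if (auto *CE = dyn_cast<ConstantExpr>(GEP))
    if (Constant *Folded =
            ConstantFoldConstantExpression(CE, M.getDataLayout()))
      return Folded;
  return GEP;
}

Dispatching on the GEPOperator lets SymbolicallyEvaluateGEP take the source and result element types directly from the operator, which is what the removed Instruction::GetElementPtr switch case could not do when it only had an opcode and a destination type.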