diff --git a/llvm/lib/Analysis/ScalarEvolution.cpp b/llvm/lib/Analysis/ScalarEvolution.cpp
--- a/llvm/lib/Analysis/ScalarEvolution.cpp
+++ b/llvm/lib/Analysis/ScalarEvolution.cpp
@@ -3532,9 +3532,8 @@
                                              : SCEV::FlagAnyWrap;
   const SCEV *TotalOffset = getZero(IntIdxTy);
-  // The array size is unimportant. The first thing we do on CurTy is getting
-  // its element type.
-  Type *CurTy = ArrayType::get(GEP->getSourceElementType(), 0);
+  Type *CurTy = GEP->getType();
+  bool FirstIter = true;
   for (const SCEV *IndexExpr : IndexExprs) {
     // Compute the (potentially symbolic) offset in bytes for this index.
     if (StructType *STy = dyn_cast<StructType>(CurTy)) {
@@ -3550,7 +3549,14 @@
       CurTy = STy->getTypeAtIndex(Index);
     } else {
       // Update CurTy to its element type.
-      CurTy = cast<SequentialType>(CurTy)->getElementType();
+      if (FirstIter) {
+        assert(isa<PointerType>(CurTy) &&
+               "The first index of a GEP indexes a pointer");
+        CurTy = GEP->getSourceElementType();
+        FirstIter = false;
+      } else {
+        CurTy = cast<SequentialType>(CurTy)->getElementType();
+      }
       // For an array, add the element offset, explicitly scaled.
       const SCEV *ElementSize = getSizeOfExpr(IntIdxTy, CurTy);
       // Getelementptr indices are signed.
@@ -3754,6 +3760,14 @@
   // We can bypass creating a target-independent
   // constant expression and then folding it back into a ConstantInt.
   // This is just a compile-time optimization.
+  if (auto *VecTy = dyn_cast<VectorType>(AllocTy)) {
+    if (VecTy->isScalable()) {
+      Constant *NullPtr = Constant::getNullValue(AllocTy->getPointerTo());
+      Constant *One = ConstantInt::get(IntTy, 1);
+      Constant *GEP = ConstantExpr::getGetElementPtr(AllocTy, NullPtr, One);
+      return getSCEV(ConstantExpr::getPtrToInt(GEP, IntTy));
+    }
+  }
   return getConstant(IntTy, getDataLayout().getTypeAllocSize(AllocTy));
 }
diff --git a/llvm/lib/Analysis/ValueTracking.cpp b/llvm/lib/Analysis/ValueTracking.cpp
--- a/llvm/lib/Analysis/ValueTracking.cpp
+++ b/llvm/lib/Analysis/ValueTracking.cpp
@@ -1447,7 +1447,7 @@
           break;
         }
         unsigned GEPOpiBits = Index->getType()->getScalarSizeInBits();
-        uint64_t TypeSize = Q.DL.getTypeAllocSize(IndexedTy);
+        uint64_t TypeSize = Q.DL.getTypeAllocSize(IndexedTy).getKnownMinSize();
         LocalKnown.Zero = LocalKnown.One = APInt(GEPOpiBits, 0);
         computeKnownBits(Index, LocalKnown, Depth + 1, Q);
         TrailZ = std::min(TrailZ,
diff --git a/llvm/test/Analysis/ScalarEvolution/scalable-vector.ll b/llvm/test/Analysis/ScalarEvolution/scalable-vector.ll
new file mode 100644
--- /dev/null
+++ b/llvm/test/Analysis/ScalarEvolution/scalable-vector.ll
@@ -0,0 +1,11 @@
+; RUN: opt -scalar-evolution -analyze < %s | FileCheck %s
+
+; CHECK: %1 = getelementptr <vscale x 4 x i32>, <vscale x 4 x i32>* null, i32 3
+; CHECK: --> (3 * sizeof(<vscale x 4 x i32>)) U: [0,-15) S: [-9223372036854775808,9223372036854775793)
+; CHECK: %2 = getelementptr <vscale x 4 x i32>, <vscale x 4 x i32>* %p, i32 1
+; CHECK: --> (sizeof(<vscale x 4 x i32>) + %p) U: full-set S: full-set
+define void @a(<vscale x 4 x i32> *%p) {
+  getelementptr <vscale x 4 x i32>, <vscale x 4 x i32>* null, i32 3
+  getelementptr <vscale x 4 x i32>, <vscale x 4 x i32>* %p, i32 1
+  ret void
+}