Index: llvm/trunk/lib/Analysis/LoopAccessAnalysis.cpp
===================================================================
--- llvm/trunk/lib/Analysis/LoopAccessAnalysis.cpp
+++ llvm/trunk/lib/Analysis/LoopAccessAnalysis.cpp
@@ -1189,12 +1189,25 @@
 
   unsigned IdxWidth = DL.getIndexSizeInBits(ASA);
   Type *Ty = cast<PointerType>(PtrA->getType())->getElementType();
-  APInt Size(IdxWidth, DL.getTypeStoreSize(Ty));
 
   APInt OffsetA(IdxWidth, 0), OffsetB(IdxWidth, 0);
   PtrA = PtrA->stripAndAccumulateInBoundsConstantOffsets(DL, OffsetA);
   PtrB = PtrB->stripAndAccumulateInBoundsConstantOffsets(DL, OffsetB);
 
+  // Retrieve the address space again as pointer stripping now tracks through
+  // `addrspacecast`.
+  ASA = cast<PointerType>(PtrA->getType())->getAddressSpace();
+  ASB = cast<PointerType>(PtrB->getType())->getAddressSpace();
+  // Check that the address spaces match and that the pointers are valid.
+  if (ASA != ASB)
+    return false;
+
+  IdxWidth = DL.getIndexSizeInBits(ASA);
+  OffsetA = OffsetA.sextOrTrunc(IdxWidth);
+  OffsetB = OffsetB.sextOrTrunc(IdxWidth);
+
+  APInt Size(IdxWidth, DL.getTypeStoreSize(Ty));
+
   // OffsetDelta = OffsetB - OffsetA;
   const SCEV *OffsetSCEVA = SE.getConstant(OffsetA);
   const SCEV *OffsetSCEVB = SE.getConstant(OffsetB);
Index: llvm/trunk/test/Transforms/SLPVectorizer/AMDGPU/address-space-ptr-sze-gep-index-assert.ll
===================================================================
--- llvm/trunk/test/Transforms/SLPVectorizer/AMDGPU/address-space-ptr-sze-gep-index-assert.ll
+++ llvm/trunk/test/Transforms/SLPVectorizer/AMDGPU/address-space-ptr-sze-gep-index-assert.ll
@@ -147,3 +147,16 @@
   store i32 %sub1, i32* undef
   ret void
 }
+
+; CHECK-LABEL: slp_crash_on_addrspacecast
+; CHECK: ret void
+define void @slp_crash_on_addrspacecast() {
+entry:
+  %0 = getelementptr inbounds i64, i64 addrspace(3)* undef, i32 undef
+  %p0 = addrspacecast i64 addrspace(3)* %0 to i64*
+  store i64 undef, i64* %p0, align 8
+  %1 = getelementptr inbounds i64, i64 addrspace(3)* undef, i32 undef
+  %p1 = addrspacecast i64 addrspace(3)* %1 to i64*
+  store i64 undef, i64* %p1, align 8
+  ret void
+}