diff --git a/llvm/include/llvm/IR/Value.h b/llvm/include/llvm/IR/Value.h
--- a/llvm/include/llvm/IR/Value.h
+++ b/llvm/include/llvm/IR/Value.h
@@ -493,12 +493,15 @@
   /// swifterror attribute.
   bool isSwiftError() const;
 
-  /// Strip off pointer casts, all-zero GEPs, and aliases.
+  /// Strip off pointer casts, all-zero GEPs, address space casts, and aliases.
+  ///
+  /// \param KeepBitPattern If set, the bit pattern of the result is guaranteed
+  /// to be equal to the bit pattern of this value.
   ///
   /// Returns the original uncasted value. If this is called on a non-pointer
   /// value, it returns 'this'.
-  const Value *stripPointerCasts() const;
-  Value *stripPointerCasts() {
+  const Value *stripPointerCasts(bool KeepBitPattern = false) const;
+  Value *stripPointerCasts(bool KeepBitPattern = false) {
     return const_cast<Value *>(
-        static_cast<const Value *>(this)->stripPointerCasts());
+        static_cast<const Value *>(this)->stripPointerCasts(KeepBitPattern));
   }
diff --git a/llvm/lib/Analysis/LazyValueInfo.cpp b/llvm/lib/Analysis/LazyValueInfo.cpp
--- a/llvm/lib/Analysis/LazyValueInfo.cpp
+++ b/llvm/lib/Analysis/LazyValueInfo.cpp
@@ -1739,7 +1739,7 @@
   // through would still be correct.
   const DataLayout &DL = CxtI->getModule()->getDataLayout();
   if (V->getType()->isPointerTy() && C->isNullValue() &&
-      isKnownNonZero(V->stripPointerCasts(), DL)) {
+      isKnownNonZero(V->stripPointerCasts(/* KeepBitPattern */ true), DL)) {
     if (Pred == ICmpInst::ICMP_EQ)
       return LazyValueInfo::False;
     else if (Pred == ICmpInst::ICMP_NE)
diff --git a/llvm/lib/IR/Value.cpp b/llvm/lib/IR/Value.cpp
--- a/llvm/lib/IR/Value.cpp
+++ b/llvm/lib/IR/Value.cpp
@@ -466,10 +466,24 @@
 };
 
 template <PointerStripKind StripKind>
-static const Value *stripPointerCastsAndOffsets(const Value *V) {
+static const Value *stripPointerCastsAndOffsets(const Value *V,
+                                                bool KeepBitPattern = false) {
   if (!V->getType()->isPointerTy())
     return V;
 
+  // Sanity check, no-op in non-assert builds and for ZeroIndices StripKinds.
+  switch (StripKind) {
+  case PSK_ZeroIndicesAndAliases:
+  case PSK_ZeroIndicesAndAliasesAndInvariantGroups:
+  case PSK_ZeroIndices:
+    // For these kinds it makes sense to force keeping the bit pattern.
+    break;
+  default:
+    assert(!KeepBitPattern &&
+           "Cannot keep the bit pattern and strip GEP indices!");
+    break;
+  }
+
   // Even though we don't look through PHI nodes, we could be called on an
   // instruction in an unreachable block, which may be on a cycle.
   SmallPtrSet<const Value *, 4> Visited;
@@ -494,7 +508,9 @@
         break;
       }
       V = GEP->getPointerOperand();
-    } else if (Operator::getOpcode(V) == Instruction::BitCast ||
+    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
+      V = cast<Operator>(V)->getOperand(0);
+    } else if (!KeepBitPattern &&
                Operator::getOpcode(V) == Instruction::AddrSpaceCast) {
       V = cast<Operator>(V)->getOperand(0);
     } else if (auto *GA = dyn_cast<GlobalAlias>(V)) {
@@ -526,8 +542,9 @@
 }
 } // end anonymous namespace
 
-const Value *Value::stripPointerCasts() const {
-  return stripPointerCastsAndOffsets<PSK_ZeroIndicesAndAliases>(this);
+const Value *Value::stripPointerCasts(bool KeepBitPattern) const {
+  return stripPointerCastsAndOffsets<PSK_ZeroIndicesAndAliases>(this,
+                                                                KeepBitPattern);
 }
 
 const Value *Value::stripPointerCastsNoFollowAliases() const {
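
Usage sketch (not part of the patch): a minimal, hypothetical caller that exercises the new KeepBitPattern parameter declared above. It assumes the patch is applied; the helper name stripForNullCompare is illustrative only and mirrors the LazyValueInfo change, where the stripped value must keep the original bit pattern so that isKnownNonZero facts carry back to the original pointer.

    #include "llvm/IR/Value.h"

    using namespace llvm;

    // Strips bitcasts, all-zero GEPs, and aliases, but stops at address space
    // casts, so the returned pointer is bit-identical to V and a comparison
    // against null on the stripped value remains meaningful for V itself.
    static const Value *stripForNullCompare(const Value *V) {
      return V->stripPointerCasts(/* KeepBitPattern */ true);
    }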