Index: include/llvm/Analysis/Loads.h
===================================================================
--- include/llvm/Analysis/Loads.h
+++ include/llvm/Analysis/Loads.h
@@ -24,11 +24,17 @@
 class MDNode;
 
 /// isSafeToLoadUnconditionally - Return true if we know that executing a load
-/// from this value cannot trap. If it is not obviously safe to load from the
-/// specified pointer, we do a quick local scan of the basic block containing
-/// ScanFrom, to determine if the address is already accessed.
+/// from this value cannot trap.
+///
+/// If ScanFrom is specified, this method performs a context-sensitive analysis
+/// and returns true if it is safe to load immediately before ScanFrom.
+///
+/// If it is not obviously safe to load from the specified pointer, we do a
+/// quick local scan of the basic block containing ScanFrom, to determine if
+/// the address is already accessed.
 bool isSafeToLoadUnconditionally(Value *V, unsigned Align,
                                  Instruction *ScanFrom,
+                                 const DataLayout &DL,
                                  const DominatorTree *DT = nullptr,
                                  const TargetLibraryInfo *TLI = nullptr);
Index: lib/Analysis/Loads.cpp
===================================================================
--- lib/Analysis/Loads.cpp
+++ lib/Analysis/Loads.cpp
@@ -64,18 +64,22 @@
 /// load from the pointer.
 bool llvm::isSafeToLoadUnconditionally(Value *V, unsigned Align,
                                        Instruction *ScanFrom,
+                                       const DataLayout &DL,
                                        const DominatorTree *DT,
                                        const TargetLibraryInfo *TLI) {
-  const DataLayout &DL = ScanFrom->getModule()->getDataLayout();
-
   // Zero alignment means that the load has the ABI alignment for the target
   if (Align == 0)
     Align = DL.getABITypeAlignment(V->getType()->getPointerElementType());
   assert(isPowerOf2_32(Align));
 
-  if (isDereferenceableAndAlignedPointer(V, Align, DL, ScanFrom, DT, TLI))
+  // If DT is not specified we can't make a context-sensitive query.
+  const Instruction* CtxI = DT ? ScanFrom : nullptr;
+  if (isDereferenceableAndAlignedPointer(V, Align, DL, CtxI, DT, TLI))
     return true;
 
+  if (!ScanFrom)
+    return false;
+
   PointerType *AddrTy = cast<PointerType>(V->getType());
   uint64_t LoadSize = DL.getTypeStoreSize(AddrTy->getElementType());
Index: lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp
===================================================================
--- lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp
+++ lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp
@@ -852,8 +852,8 @@
   if (SelectInst *SI = dyn_cast<SelectInst>(Op)) {
     // load (select (Cond, &V1, &V2)) --> select(Cond, load &V1, load &V2).
     unsigned Align = LI.getAlignment();
-    if (isSafeToLoadUnconditionally(SI->getOperand(1), Align, SI) &&
-        isSafeToLoadUnconditionally(SI->getOperand(2), Align, SI)) {
+    if (isSafeToLoadUnconditionally(SI->getOperand(1), Align, SI, DL) &&
+        isSafeToLoadUnconditionally(SI->getOperand(2), Align, SI, DL)) {
       LoadInst *V1 = Builder->CreateLoad(SI->getOperand(1),
                                          SI->getOperand(1)->getName()+".val");
       LoadInst *V2 = Builder->CreateLoad(SI->getOperand(2),
Index: lib/Transforms/Scalar/SROA.cpp
===================================================================
--- lib/Transforms/Scalar/SROA.cpp
+++ lib/Transforms/Scalar/SROA.cpp
@@ -1192,7 +1192,7 @@
     // If this pointer is always safe to load, or if we can prove that there
    // is already a load in the block, then we can move the load to the pred
    // block.
-    if (isSafeToLoadUnconditionally(InVal, MaxAlign, TI))
+    if (isSafeToLoadUnconditionally(InVal, MaxAlign, TI, DL))
      continue;

    return false;
@@ -1270,9 +1270,9 @@
    // Both operands to the select need to be dereferencable, either
    // absolutely (e.g. allocas) or at this point because we can see other
    // accesses to it.
-    if (!isSafeToLoadUnconditionally(TValue, LI->getAlignment(), LI))
+    if (!isSafeToLoadUnconditionally(TValue, LI->getAlignment(), LI, DL))
      return false;
-    if (!isSafeToLoadUnconditionally(FValue, LI->getAlignment(), LI))
+    if (!isSafeToLoadUnconditionally(FValue, LI->getAlignment(), LI, DL))
      return false;
  }
Index: lib/Transforms/Scalar/ScalarReplAggregates.cpp
===================================================================
--- lib/Transforms/Scalar/ScalarReplAggregates.cpp
+++ lib/Transforms/Scalar/ScalarReplAggregates.cpp
@@ -1149,10 +1149,10 @@
    // Both operands to the select need to be dereferencable, either absolutely
    // (e.g. allocas) or at this point because we can see other accesses to it.
    if (!isSafeToLoadUnconditionally(SI->getTrueValue(), LI->getAlignment(),
-                                     LI))
+                                     LI, DL))
      return false;
    if (!isSafeToLoadUnconditionally(SI->getFalseValue(), LI->getAlignment(),
-                                     LI))
+                                     LI, DL))
      return false;
  }
@@ -1225,7 +1225,7 @@
  // If this pointer is always safe to load, or if we can prove that there is
  // already a load in the block, then we can move the load to the pred block.
-  if (isSafeToLoadUnconditionally(InVal, MaxAlign, Pred->getTerminator()))
+  if (isSafeToLoadUnconditionally(InVal, MaxAlign, Pred->getTerminator(), DL))
    continue;

  return false;
Index: lib/Transforms/Scalar/TailRecursionElimination.cpp
===================================================================
--- lib/Transforms/Scalar/TailRecursionElimination.cpp
+++ lib/Transforms/Scalar/TailRecursionElimination.cpp
@@ -454,9 +454,10 @@
      // does not write to memory and the load provably won't trap.
      // FIXME: Writes to memory only matter if they may alias the pointer
      // being loaded from.
+      const DataLayout &DL = L->getModule()->getDataLayout();
      if (CI->mayWriteToMemory() ||
          !isSafeToLoadUnconditionally(L->getPointerOperand(),
-                                       L->getAlignment(), L))
+                                       L->getAlignment(), L, DL))
        return false;
    }
  }
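For reference, a minimal caller-side sketch of the updated interface follows. It is illustrative only and not part of the patch; the helper name canSpeculateLoad is an assumption. It shows a caller obtaining the DataLayout from the load's enclosing module and passing it explicitly, the same pattern the TailRecursionElimination hunk above now uses.

// Illustrative sketch, not part of the patch. canSpeculateLoad is a
// hypothetical helper; with this change the caller supplies the DataLayout
// instead of isSafeToLoadUnconditionally fetching it from ScanFrom's module.
#include "llvm/Analysis/Loads.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Module.h"

using namespace llvm;

static bool canSpeculateLoad(LoadInst *LI, const DominatorTree *DT) {
  const DataLayout &DL = LI->getModule()->getDataLayout();
  // DT may be null; per the Loads.cpp hunk, the context-sensitive
  // dereferenceability query is then skipped and only the local scan
  // around LI is performed.
  return isSafeToLoadUnconditionally(LI->getPointerOperand(),
                                     LI->getAlignment(), LI, DL, DT);
}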