Index: lib/Transforms/Scalar/GVN.cpp
===================================================================
--- lib/Transforms/Scalar/GVN.cpp
+++ lib/Transforms/Scalar/GVN.cpp
@@ -1318,13 +1318,17 @@
 
 Value *AvailableValueInBlock::MaterializeAdjustedValue(LoadInst *LI,
                                                        GVN &gvn) const {
+  Instruction *InsertPt = BB->getTerminator();
+  if (LI->getParent() == BB)
+    InsertPt = LI;
+
   Value *Res;
   Type *LoadTy = LI->getType();
   const DataLayout &DL = LI->getModule()->getDataLayout();
   if (isSimpleValue()) {
     Res = getSimpleValue();
     if (Res->getType() != LoadTy) {
-      Res = GetStoreValueForLoad(Res, Offset, LoadTy, BB->getTerminator(), DL);
+      Res = GetStoreValueForLoad(Res, Offset, LoadTy, InsertPt, DL);
 
       DEBUG(dbgs() << "GVN COERCED NONLOCAL VAL:\nOffset: " << Offset << "  "
                    << *getSimpleValue() << '\n'
@@ -1335,7 +1339,7 @@
     if (Load->getType() == LoadTy && Offset == 0) {
       Res = Load;
     } else {
-      Res = GetLoadValueForLoad(Load, Offset, LoadTy, BB->getTerminator(),
+      Res = GetLoadValueForLoad(Load, Offset, LoadTy, InsertPt,
                                 gvn);
 
       DEBUG(dbgs() << "GVN COERCED NONLOCAL LOAD:\nOffset: " << Offset << "  "
@@ -1344,7 +1348,7 @@
     }
   } else if (isMemIntrinValue()) {
     Res = GetMemInstValueForLoad(getMemIntrinValue(), Offset, LoadTy,
-                                 BB->getTerminator(), DL);
+                                 InsertPt, DL);
     DEBUG(dbgs() << "GVN COERCED NONLOCAL MEM INTRIN:\nOffset: " << Offset
                  << "  " << *getMemIntrinValue() << '\n'
                  << *Res << '\n' << "\n\n\n");
@@ -1893,7 +1897,6 @@
 
   // ... to a pointer that has been loaded from before...
   MemDepResult Dep = MD->getDependency(L);
-  const DataLayout &DL = L->getModule()->getDataLayout();
 
   // If it is defined in another block, try harder.
   if (Dep.isNonLocal())
@@ -1911,144 +1914,35 @@
     return false;
   }
 
+  // TODO: Invert this change...
+  LoadDepVect Deps;
+  Deps.push_back(NonLocalDepResult(L->getParent(), Dep,
+                                   L->getPointerOperand()));
+  AvailValInBlkVect ValuesPerBlock;
+  UnavailBlkVect UnavailableBlocks;
+  AnalyzeLoadAvailability(L, Deps, ValuesPerBlock, UnavailableBlocks);
+
-  // If we have a clobber and target data is around, see if this is a clobber
-  // that we can fix up through code synthesis.
-  if (Dep.isClobber()) {
-    // Check to see if we have something like this:
-    //   store i32 123, i32* %P
-    //   %A = bitcast i32* %P to i8*
-    //   %B = gep i8* %A, i32 1
-    //   %C = load i8* %B
-    //
-    // We could do that by recognizing if the clobber instructions are obviously
-    // a common base + constant offset, and if the previous store (or memset)
-    // completely covers this load. This sort of thing can happen in bitfield
-    // access code.
-    Value *AvailVal = nullptr;
-    if (StoreInst *DepSI = dyn_cast<StoreInst>(Dep.getInst())) {
-      int Offset = AnalyzeLoadFromClobberingStore(
-          L->getType(), L->getPointerOperand(), DepSI);
-      if (Offset != -1)
-        AvailVal = GetStoreValueForLoad(DepSI->getValueOperand(), Offset,
-                                        L->getType(), L, DL);
-    }
-
-    // Check to see if we have something like this:
-    //    load i32* P
-    //    load i8* (P+1)
-    // if we have this, replace the later with an extraction from the former.
-    if (LoadInst *DepLI = dyn_cast<LoadInst>(Dep.getInst())) {
-      // If this is a clobber and L is the first instruction in its block, then
-      // we have the first instruction in the entry block.
-      if (DepLI == L)
-        return false;
+  assert(1 == ValuesPerBlock.size() + UnavailableBlocks.size() &&
+         "only one dependency for FRE!");
 
-      int Offset = AnalyzeLoadFromClobberingLoad(
-          L->getType(), L->getPointerOperand(), DepLI, DL);
-      if (Offset != -1)
-        AvailVal = GetLoadValueForLoad(DepLI, Offset, L->getType(), L, *this);
-    }
-
-    // If the clobbering value is a memset/memcpy/memmove, see if we can forward
-    // a value on from it.
-    if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(Dep.getInst())) {
-      int Offset = AnalyzeLoadFromClobberingMemInst(
-          L->getType(), L->getPointerOperand(), DepMI, DL);
-      if (Offset != -1)
-        AvailVal = GetMemInstValueForLoad(DepMI, Offset, L->getType(), L, DL);
-    }
-
-    if (AvailVal) {
-      DEBUG(dbgs() << "GVN COERCED INST:\n" << *Dep.getInst() << '\n'
-                   << *AvailVal << '\n' << *L << "\n\n\n");
-
-      // Replace the load!
-      L->replaceAllUsesWith(AvailVal);
-      if (AvailVal->getType()->getScalarType()->isPointerTy())
-        MD->invalidateCachedPointerInfo(AvailVal);
+  // Eliminate full redundancy.
+  if (UnavailableBlocks.empty()) {
+    assert(!ValuesPerBlock.empty() && "as above");
+    Value *AvailableValue =
+        ValuesPerBlock[0].MaterializeAdjustedValue(L, *this);
+    if (AvailableValue) {
+      // Do the actual replacement
+      patchAndReplaceAllUsesWith(L, AvailableValue);
       markInstructionForDeletion(L);
       ++NumGVNLoad;
-      return true;
+      // Tell MDA to reexamine the reused pointer since we might have more
+      // information after forwarding it.
+      if (MD && AvailableValue->getType()->getScalarType()->isPointerTy())
+        MD->invalidateCachedPointerInfo(AvailableValue);
+      return true;
     }
-
-    // If the value isn't available, don't do anything!
-    DEBUG(
-        // fast print dep, using operator<< on instruction is too slow.
-        dbgs() << "GVN: load ";
-        L->printAsOperand(dbgs());
-        Instruction *I = Dep.getInst();
-        dbgs() << " is clobbered by " << *I << '\n';
-    );
-    return false;
-  }
+  }
 
-  assert(Dep.isDef() && "expected from control flow");
-
-  Instruction *DepInst = Dep.getInst();
-  Value *AvailableValue = nullptr;
-  if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInst)) {
-    Value *StoredVal = DepSI->getValueOperand();
-
-    // The store and load are to a must-aliased pointer, but they may not
-    // actually have the same type. See if we know how to reuse the stored
-    // value (depending on its type).
-    if (StoredVal->getType() != L->getType()) {
-      IRBuilder<> Builder(L);
-      StoredVal =
-          CoerceAvailableValueToLoadType(StoredVal, L->getType(), Builder, DL);
-      if (!StoredVal)
-        return false;
-
-      DEBUG(dbgs() << "GVN COERCED STORE:\n" << *DepSI << '\n' << *StoredVal
                   << '\n' << *L << "\n\n\n");
-    }
-
-    AvailableValue = StoredVal;
-  }
-
-  if (LoadInst *DepLI = dyn_cast<LoadInst>(DepInst)) {
-    AvailableValue = DepLI;
-    // The loads are of a must-aliased pointer, but they may not actually have
-    // the same type. See if we know how to reuse the previously loaded value
-    // (depending on its type).
-    if (DepLI->getType() != L->getType()) {
-      IRBuilder<> Builder(L);
-      AvailableValue =
-          CoerceAvailableValueToLoadType(DepLI, L->getType(), Builder, DL);
-      if (!AvailableValue)
-        return false;
-
-      DEBUG(dbgs() << "GVN COERCED LOAD:\n" << *DepLI << "\n" << *AvailableValue
-                   << "\n" << *L << "\n\n\n");
-    }
-  }
-
-  // If this load really doesn't depend on anything, then we must be loading an
-  // undef value. This can happen when loading for a fresh allocation with no
-  // intervening stores, for example.
-  if (isa<AllocaInst>(DepInst) || isMallocLikeFn(DepInst, TLI) ||
-      isLifetimeStart(DepInst))
-    AvailableValue = UndefValue::get(L->getType());
-
-  // If this load follows a calloc (which zero initializes memory),
-  // then the loaded value is zero
-  if (isCallocLikeFn(DepInst, TLI))
-    AvailableValue = Constant::getNullValue(L->getType());
-
-  if (AvailableValue) {
-    // Do the actual replacement
-    patchAndReplaceAllUsesWith(L, AvailableValue);
-    markInstructionForDeletion(L);
-    ++NumGVNLoad;
-    // Tell MDA to rexamine the reused pointer since we might have more
-    // information after forwarding it.
-    if (MD && AvailableValue->getType()->getScalarType()->isPointerTy())
-      MD->invalidateCachedPointerInfo(AvailableValue);
-
-    return true;
-  }
-
   return false;
 }
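
Note (illustrative, not part of the patch): a minimal IR sketch, in the typed-pointer syntax of this LLVM era with hypothetical value names, of the local-dependency case the unified path now covers. MD->getDependency(L) reports the store as a local clobber, AnalyzeLoadAvailability turns it into a single entry in ValuesPerBlock, UnavailableBlocks stays empty, and the load is fully redundant:

    define i8 @example(i32* %P) {
    entry:
      store i32 305419896, i32* %P   ; 0x12345678 fully covers the narrower load
      %A = bitcast i32* %P to i8*
      %B = load i8, i8* %A           ; fully redundant with the store
      %C = add i8 %B, 1              ; use of %B before the terminator
      ret i8 %C
    }

MaterializeAdjustedValue has to coerce the stored i32 down to i8 (on a little-endian target, a trunc). The InsertPt change above is what makes reusing that code safe here: the dependency is in the load's own block, so the coercion must be inserted at the load itself, where it dominates %C. Inserting at BB->getTerminator(), as the nonlocal path does when materializing values in predecessor blocks, would place the trunc after %C's use of the forwarded value.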