
Commit 80b806b

Committed Sep 12, 2017
Make promoteLoopAccessesToScalars independent of AliasSet [NFC]
Summary:
The current promoteLoopAccessesToScalars method receives an AliasSet, but the
information it actually uses is a list of Value* that are known to must-alias.
Create that list ahead of time, making the method independent of the AliasSet
class. While there is no functional change, this does add the overhead of
building a set of Value* in cases where promotion would previously have exited
earlier.

This is meant as a first refactoring step toward replacing AliasSetTracker
with MemorySSA. While the end goal is to redesign LICM, the first few steps
will focus on adding MemorySSA as an alternative to the AliasSetTracker,
reusing most of the existing functionality.

Reviewers: mkuper, danielcdh, dberlin

Subscribers: sanjoy, chandlerc, gberry, davide, llvm-commits

Differential Revision: https://reviews.llvm.org/D35439

llvm-svn: 313075
1 parent 106dd03 commit 80b806b
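
In outline, the caller now flattens each promotable AliasSet into a SmallSetVector of must-alias pointers and hands that set to promoteLoopAccessesToScalars, which no longer needs the AliasSet type at all. The sketch below is a condensed, illustrative excerpt of the new caller-side loop distilled from the LICM.cpp hunk further down; it is not a standalone translation unit, and the surrounding pass state (CurAST, L, Promoted, ExitBlocks, InsertPts, PIC, LI, DT, TLI, SafetyInfo, ORE) is assumed to exist as in runOnLoop.

// Core of the new caller loop in LoopInvariantCodeMotion::runOnLoop
// (condensed from the patch below; surrounding declarations elided).
for (AliasSet &AS : *CurAST) {
  // Only "Must", modifying, non-volatile alias sets whose pointer is loop
  // invariant can be promoted; everything else is skipped up front.
  if (AS.isForwardingAliasSet() || !AS.isMod() || !AS.isMustAlias() ||
      AS.isVolatile() || !L->isLoopInvariant(AS.begin()->getValue()))
    continue;

  // Flatten the alias set into a plain, ordered set of must-alias pointers,
  // so promoteLoopAccessesToScalars no longer depends on the AliasSet class.
  SmallSetVector<Value *, 8> PointerMustAliases;
  for (const auto &ASI : AS)
    PointerMustAliases.insert(ASI.getValue());

  Promoted |= promoteLoopAccessesToScalars(PointerMustAliases, ExitBlocks,
                                           InsertPts, PIC, LI, DT, TLI, L,
                                           CurAST, &SafetyInfo, ORE);
}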

2 files changed (+60, -53 lines)

llvm/include/llvm/Transforms/Utils/LoopUtils.h

+8, -6

@@ -16,6 +16,7 @@
 
 #include "llvm/ADT/DenseMap.h"
 #include "llvm/ADT/Optional.h"
+#include "llvm/ADT/SetVector.h"
 #include "llvm/ADT/SmallPtrSet.h"
 #include "llvm/ADT/SmallVector.h"
 #include "llvm/ADT/StringRef.h"
@@ -441,12 +442,13 @@ bool hoistRegion(DomTreeNode *, AliasAnalysis *, LoopInfo *, DominatorTree *,
 /// \brief Try to promote memory values to scalars by sinking stores out of
 /// the loop and moving loads to before the loop. We do this by looping over
 /// the stores in the loop, looking for stores to Must pointers which are
-/// loop invariant. It takes AliasSet, Loop exit blocks vector, loop exit blocks
-/// insertion point vector, PredIteratorCache, LoopInfo, DominatorTree, Loop,
-/// AliasSet information for all instructions of the loop and loop safety
-/// information as arguments. Diagnostics is emitted via \p ORE. It returns
-/// changed status.
-bool promoteLoopAccessesToScalars(AliasSet &, SmallVectorImpl<BasicBlock *> &,
+/// loop invariant. It takes a set of must-alias values, Loop exit blocks
+/// vector, loop exit blocks insertion point vector, PredIteratorCache,
+/// LoopInfo, DominatorTree, Loop, AliasSet information for all instructions
+/// of the loop and loop safety information as arguments.
+/// Diagnostics is emitted via \p ORE. It returns changed status.
+bool promoteLoopAccessesToScalars(const SmallSetVector<Value *, 8> &,
+                                  SmallVectorImpl<BasicBlock *> &,
                                   SmallVectorImpl<Instruction *> &,
                                   PredIteratorCache &, LoopInfo *,
                                   DominatorTree *, const TargetLibraryInfo *,

llvm/lib/Transforms/Scalar/LICM.cpp

+52, -47

@@ -189,7 +189,7 @@ struct LegacyLICMPass : public LoopPass {
   /// Simple Analysis hook. Delete loop L from alias set map.
   void deleteAnalysisLoop(Loop *L) override;
 };
-}
+} // namespace
 
 PreservedAnalyses LICMPass::run(Loop &L, LoopAnalysisManager &AM,
                                 LoopStandardAnalysisResults &AR, LPMUpdater &) {
@@ -292,10 +292,26 @@ bool LoopInvariantCodeMotion::runOnLoop(Loop *L, AliasAnalysis *AA,
   bool Promoted = false;
 
   // Loop over all of the alias sets in the tracker object.
-  for (AliasSet &AS : *CurAST)
-    Promoted |=
-        promoteLoopAccessesToScalars(AS, ExitBlocks, InsertPts, PIC, LI, DT,
-                                     TLI, L, CurAST, &SafetyInfo, ORE);
+  for (AliasSet &AS : *CurAST) {
+    // We can promote this alias set if it has a store, if it is a "Must"
+    // alias set, if the pointer is loop invariant, and if we are not
+    // eliminating any volatile loads or stores.
+    if (AS.isForwardingAliasSet() || !AS.isMod() || !AS.isMustAlias() ||
+        AS.isVolatile() || !L->isLoopInvariant(AS.begin()->getValue()))
+      continue;
+
+    assert(
+        !AS.empty() &&
+        "Must alias set should have at least one pointer element in it!");
+
+    SmallSetVector<Value *, 8> PointerMustAliases;
+    for (const auto &ASI : AS)
+      PointerMustAliases.insert(ASI.getValue());
+
+    Promoted |= promoteLoopAccessesToScalars(PointerMustAliases, ExitBlocks,
+                                             InsertPts, PIC, LI, DT, TLI, L,
+                                             CurAST, &SafetyInfo, ORE);
+  }
 
   // Once we have promoted values across the loop body we have to
   // recursively reform LCSSA as any nested loop may now have values defined
@@ -383,8 +399,8 @@ bool llvm::sinkRegion(DomTreeNode *N, AliasAnalysis *AA, LoopInfo *LI,
     for (BasicBlock::iterator II = BB->end(); II != BB->begin();) {
       Instruction &I = *--II;
 
-      // If the instruction is dead, we would try to sink it because it isn't used
-      // in the loop, instead, just delete it.
+      // If the instruction is dead, we would try to sink it because it isn't
+      // used in the loop, instead, just delete it.
       if (isInstructionTriviallyDead(&I, TLI)) {
         DEBUG(dbgs() << "LICM deleting dead inst: " << I << '\n');
         ++II;
@@ -509,7 +525,8 @@ void llvm::computeLoopSafetyInfo(LoopSafetyInfo *SafetyInfo, Loop *CurLoop) {
   // Iterate over loop instructions and compute safety info.
   // Skip header as it has been computed and stored in HeaderMayThrow.
   // The first block in loopinfo.Blocks is guaranteed to be the header.
-  assert(Header == *CurLoop->getBlocks().begin() && "First block must be header");
+  assert(Header == *CurLoop->getBlocks().begin() &&
+         "First block must be header");
   for (Loop::block_iterator BB = std::next(CurLoop->block_begin()),
                             BBE = CurLoop->block_end();
        (BB != BBE) && !SafetyInfo->MayThrow; ++BB)
@@ -527,9 +544,9 @@ void llvm::computeLoopSafetyInfo(LoopSafetyInfo *SafetyInfo, Loop *CurLoop) {
 }
 
 // Return true if LI is invariant within scope of the loop. LI is invariant if
-// CurLoop is dominated by an invariant.start representing the same memory location
-// and size as the memory location LI loads from, and also the invariant.start
-// has no uses.
+// CurLoop is dominated by an invariant.start representing the same memory
+// location and size as the memory location LI loads from, and also the
+// invariant.start has no uses.
 static bool isLoadInvariantInLoop(LoadInst *LI, DominatorTree *DT,
                                   Loop *CurLoop) {
   Value *Addr = LI->getOperand(0);
@@ -950,7 +967,7 @@ static bool isSafeToExecuteUnconditionally(Instruction &Inst,
 namespace {
 class LoopPromoter : public LoadAndStorePromoter {
   Value *SomePtr; // Designated pointer to store to.
-  SmallPtrSetImpl<Value *> &PointerMustAliases;
+  const SmallSetVector<Value *, 8> &PointerMustAliases;
   SmallVectorImpl<BasicBlock *> &LoopExitBlocks;
   SmallVectorImpl<Instruction *> &LoopInsertPts;
   PredIteratorCache &PredCache;
@@ -978,15 +995,15 @@ class LoopPromoter : public LoadAndStorePromoter {
 
 public:
   LoopPromoter(Value *SP, ArrayRef<const Instruction *> Insts, SSAUpdater &S,
-               SmallPtrSetImpl<Value *> &PMA,
+               const SmallSetVector<Value *, 8> &PMA,
               SmallVectorImpl<BasicBlock *> &LEB,
               SmallVectorImpl<Instruction *> &LIP, PredIteratorCache &PIC,
               AliasSetTracker &ast, LoopInfo &li, DebugLoc dl, int alignment,
               bool UnorderedAtomic, const AAMDNodes &AATags)
       : LoadAndStorePromoter(Insts, S), SomePtr(SP), PointerMustAliases(PMA),
         LoopExitBlocks(LEB), LoopInsertPts(LIP), PredCache(PIC), AST(ast),
         LI(li), DL(std::move(dl)), Alignment(alignment),
-        UnorderedAtomic(UnorderedAtomic),AATags(AATags) {}
+        UnorderedAtomic(UnorderedAtomic), AATags(AATags) {}
 
   bool isInstInList(Instruction *I,
                     const SmallVectorImpl<Instruction *> &) const override {
@@ -1025,15 +1042,16 @@ class LoopPromoter : public LoadAndStorePromoter {
   }
   void instructionDeleted(Instruction *I) const override { AST.deleteValue(I); }
 };
-} // end anon namespace
+} // namespace
 
 /// Try to promote memory values to scalars by sinking stores out of the
 /// loop and moving loads to before the loop. We do this by looping over
 /// the stores in the loop, looking for stores to Must pointers which are
 /// loop invariant.
 ///
 bool llvm::promoteLoopAccessesToScalars(
-    AliasSet &AS, SmallVectorImpl<BasicBlock *> &ExitBlocks,
+    const SmallSetVector<Value *, 8> &PointerMustAliases,
+    SmallVectorImpl<BasicBlock *> &ExitBlocks,
     SmallVectorImpl<Instruction *> &InsertPts, PredIteratorCache &PIC,
     LoopInfo *LI, DominatorTree *DT, const TargetLibraryInfo *TLI,
    Loop *CurLoop, AliasSetTracker *CurAST, LoopSafetyInfo *SafetyInfo,
@@ -1043,17 +1061,7 @@ bool llvm::promoteLoopAccessesToScalars(
          CurAST != nullptr && SafetyInfo != nullptr &&
          "Unexpected Input to promoteLoopAccessesToScalars");
 
-  // We can promote this alias set if it has a store, if it is a "Must" alias
-  // set, if the pointer is loop invariant, and if we are not eliminating any
-  // volatile loads or stores.
-  if (AS.isForwardingAliasSet() || !AS.isMod() || !AS.isMustAlias() ||
-      AS.isVolatile() || !CurLoop->isLoopInvariant(AS.begin()->getValue()))
-    return false;
-
-  assert(!AS.empty() &&
-         "Must alias set should have at least one pointer element in it!");
-
-  Value *SomePtr = AS.begin()->getValue();
+  Value *SomePtr = *PointerMustAliases.begin();
   BasicBlock *Preheader = CurLoop->getLoopPreheader();
 
   // It isn't safe to promote a load/store from the loop if the load/store is
@@ -1082,8 +1090,8 @@ bool llvm::promoteLoopAccessesToScalars(
   // is safe (i.e. proving dereferenceability on all paths through the loop). We
   // can use any access within the alias set to prove dereferenceability,
   // since they're all must alias.
-  //
-  // There are two ways establish (p2):
+  //
+  // There are two ways establish (p2):
   //  a) Prove the location is thread-local. In this case the memory model
   //     requirement does not apply, and stores are safe to insert.
   //  b) Prove a store dominates every exit block. In this case, if an exit
@@ -1097,13 +1105,12 @@ bool llvm::promoteLoopAccessesToScalars(
   bool SafeToInsertStore = false;
 
   SmallVector<Instruction *, 64> LoopUses;
-  SmallPtrSet<Value *, 4> PointerMustAliases;
 
   // We start with an alignment of one and try to find instructions that allow
   // us to prove better alignment.
   unsigned Alignment = 1;
   // Keep track of which types of access we see
-  bool SawUnorderedAtomic = false;
+  bool SawUnorderedAtomic = false;
   bool SawNotAtomic = false;
   AAMDNodes AATags;
 
@@ -1124,15 +1131,16 @@ bool llvm::promoteLoopAccessesToScalars(
     // If this is a base pointer we do not understand, simply bail.
     // We only handle alloca and return value from alloc-like fn right now.
     if (!isa<AllocaInst>(Object)) {
-      if (!isAllocLikeFn(Object, TLI))
-        return false;
-      // If this is an alloc like fn. There are more constraints we need to verify.
-      // More specifically, we must make sure that the pointer can not escape.
+      if (!isAllocLikeFn(Object, TLI))
+        return false;
+      // If this is an alloc like fn. There are more constraints we need to
+      // verify. More specifically, we must make sure that the pointer can not
+      // escape.
       //
-      // NOTE: PointerMayBeCaptured is not enough as the pointer may have escaped
-      // even though its not captured by the enclosing function. Standard allocation
-      // functions like malloc, calloc, and operator new return values which can
-      // be assumed not to have previously escaped.
+      // NOTE: PointerMayBeCaptured is not enough as the pointer may have
+      // escaped even though its not captured by the enclosing function.
+      // Standard allocation functions like malloc, calloc, and operator new
+      // return values which can be assumed not to have previously escaped.
       if (PointerMayBeCaptured(Object, true, true))
         return false;
       IsKnownNonEscapingObject = true;
@@ -1142,10 +1150,7 @@ bool llvm::promoteLoopAccessesToScalars(
   // Check that all of the pointers in the alias set have the same type. We
   // cannot (yet) promote a memory location that is loaded and stored in
   // different sizes. While we are at it, collect alignment and AA info.
-  for (const auto &ASI : AS) {
-    Value *ASIV = ASI.getValue();
-    PointerMustAliases.insert(ASIV);
-
+  for (Value *ASIV : PointerMustAliases) {
     // Check that all of the pointers in the alias set have the same type. We
     // cannot (yet) promote a memory location that is loaded and stored in
     // different sizes.
@@ -1164,7 +1169,7 @@ bool llvm::promoteLoopAccessesToScalars(
       assert(!Load->isVolatile() && "AST broken");
       if (!Load->isUnordered())
        return false;
-
+
      SawUnorderedAtomic |= Load->isAtomic();
      SawNotAtomic |= !Load->isAtomic();
 
@@ -1257,8 +1262,8 @@ bool llvm::promoteLoopAccessesToScalars(
    else {
      Value *Object = GetUnderlyingObject(SomePtr, MDL);
      SafeToInsertStore =
-        (isAllocLikeFn(Object, TLI) || isa<AllocaInst>(Object)) &&
-        !PointerMayBeCaptured(Object, true, true);
+          (isAllocLikeFn(Object, TLI) || isa<AllocaInst>(Object)) &&
+          !PointerMayBeCaptured(Object, true, true);
    }
  }
 
@@ -1350,7 +1355,7 @@ LoopInvariantCodeMotion::collectAliasInfoForLoop(Loop *L, LoopInfo *LI,
  auto mergeLoop = [&](Loop *L) {
    // Loop over the body of this loop, looking for calls, invokes, and stores.
    for (BasicBlock *BB : L->blocks())
-      CurAST->add(*BB); // Incorporate the specified basic block
+      CurAST->add(*BB);              // Incorporate the specified basic block
  };
 
  // Add everything from the sub loops that are no longer directly available.
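
A note on the container choice: SmallSetVector (hence the new include of llvm/ADT/SetVector.h in LoopUtils.h) deduplicates while preserving insertion order, so *PointerMustAliases.begin() and any iteration over the set are deterministic, which a hash-based pointer set would not guarantee. A minimal, standalone illustration of that behavior, using int rather than Value * purely for brevity:

#include "llvm/ADT/SetVector.h"
#include <cstdio>

int main() {
  llvm::SmallSetVector<int, 8> Vals;
  Vals.insert(3);
  Vals.insert(1);
  bool Inserted = Vals.insert(3); // Duplicate: ignored, insert() returns false.
  for (int V : Vals)
    std::printf("%d ", V);        // Prints "3 1" -- first-insertion order.
  std::printf("\n%d\n", Inserted); // Prints "0".
  return 0;
}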
