Index: include/llvm/Analysis/TargetTransformInfo.h
===================================================================
--- include/llvm/Analysis/TargetTransformInfo.h
+++ include/llvm/Analysis/TargetTransformInfo.h
@@ -226,6 +226,15 @@
   /// starting with the sources of divergence.
   bool isSourceOfDivergence(const Value *V) const;

+  /// For targets with different pointer representations that can be converted
+  /// with the addrspacecast instruction, returns the address space ID value
+  /// for the target's 'generic' address space. In this context, a 'generic'
+  /// address space is one where accessing a memory location through a pointer
+  /// with this address space is expected to be undesirable compared to
+  /// accessing the same memory location through a pointer with a different
+  /// address space.
+  unsigned getUndesirableAddressSpace() const;
+
   /// \brief Test whether calls to a function lower to actual program function
   /// calls.
   ///
@@ -720,6 +729,7 @@
   virtual int getUserCost(const User *U) = 0;
   virtual bool hasBranchDivergence() = 0;
   virtual bool isSourceOfDivergence(const Value *V) = 0;
+  virtual unsigned getUndesirableAddressSpace() = 0;
   virtual bool isLoweredToCall(const Function *F) = 0;
   virtual void getUnrollingPreferences(Loop *L, UnrollingPreferences &UP) = 0;
   virtual bool isLegalAddImmediate(int64_t Imm) = 0;
@@ -879,6 +889,11 @@
   bool isSourceOfDivergence(const Value *V) override {
     return Impl.isSourceOfDivergence(V);
   }
+
+  unsigned getUndesirableAddressSpace() override {
+    return Impl.getUndesirableAddressSpace();
+  }
+
   bool isLoweredToCall(const Function *F) override {
     return Impl.isLoweredToCall(F);
   }
Index: include/llvm/Analysis/TargetTransformInfoImpl.h
===================================================================
--- include/llvm/Analysis/TargetTransformInfoImpl.h
+++ include/llvm/Analysis/TargetTransformInfoImpl.h
@@ -171,6 +171,10 @@

   bool isSourceOfDivergence(const Value *V) { return false; }

+  unsigned getUndesirableAddressSpace() {
+    return -1;
+  }
+
   bool isLoweredToCall(const Function *F) {
     // FIXME: These should almost certainly not be handled here, and instead
     // handled with the help of TLI or the target itself. This was largely
Index: include/llvm/CodeGen/BasicTTIImpl.h
===================================================================
--- include/llvm/CodeGen/BasicTTIImpl.h
+++ include/llvm/CodeGen/BasicTTIImpl.h
@@ -111,6 +111,11 @@

   bool isSourceOfDivergence(const Value *V) { return false; }

+  unsigned getUndesirableAddressSpace() {
+    // Return an invalid address space.
+    return -1;
+  }
+
   bool isLegalAddImmediate(int64_t imm) {
     return getTLI()->isLegalAddImmediate(imm);
   }
Index: lib/Analysis/TargetTransformInfo.cpp
===================================================================
--- lib/Analysis/TargetTransformInfo.cpp
+++ lib/Analysis/TargetTransformInfo.cpp
@@ -97,6 +97,10 @@
   return TTIImpl->isSourceOfDivergence(V);
 }

+unsigned TargetTransformInfo::getUndesirableAddressSpace() const {
+  return TTIImpl->getUndesirableAddressSpace();
+}
+
 bool TargetTransformInfo::isLoweredToCall(const Function *F) const {
   return TTIImpl->isLoweredToCall(F);
 }
Index: lib/Target/NVPTX/NVPTXInferAddressSpaces.cpp
===================================================================
--- lib/Target/NVPTX/NVPTXInferAddressSpaces.cpp
+++ lib/Target/NVPTX/NVPTXInferAddressSpaces.cpp
@@ -92,10 +92,10 @@
 #define DEBUG_TYPE "nvptx-infer-addrspace"

 #include "NVPTX.h"
-#include "MCTargetDesc/NVPTXBaseInfo.h"
 #include "llvm/ADT/DenseSet.h"
 #include "llvm/ADT/Optional.h"
 #include "llvm/ADT/SetVector.h"
+#include "llvm/Analysis/TargetTransformInfo.h"
 #include "llvm/IR/Function.h"
 #include "llvm/IR/InstIterator.h"
 #include "llvm/IR/Instructions.h"
@@ -114,6 +114,10 @@

 /// \brief NVPTXInferAddressSpaces
 class NVPTXInferAddressSpaces: public FunctionPass {
+  /// Target specific address space whose uses should be replaced if
+  /// possible.
+  unsigned GenericAddrSpace;
+
 public:
   static char ID;

@@ -121,6 +125,7 @@

   void getAnalysisUsage(AnalysisUsage &AU) const override {
     AU.setPreservesCFG();
+    AU.addRequired<TargetTransformInfoWrapperPass>();
   }

   bool runOnFunction(Function &F) override;
@@ -142,7 +147,18 @@

   bool rewriteWithNewAddressSpaces(const std::vector<Value *> &Postorder,
                                    const ValueToAddrSpaceMapTy &InferredAddrSpace,
-                                   Function *F);
+                                   Function *F) const;
+
+  void appendsGenericAddressExpressionToPostorderStack(
+      Value *V, std::vector<std::pair<Value *, bool>> *PostorderStack,
+      DenseSet<Value *> *Visited) const;
+
+  std::vector<Value *> collectGenericAddressExpressions(Function &F) const;
+  Value *cloneValueWithNewAddressSpace(
+      Value *V, unsigned NewAddrSpace,
+      const ValueToValueMapTy &ValueWithNewAddrSpace,
+      SmallVectorImpl<const Use *> *UndefUsesToFix) const;
+  unsigned joinAddressSpaces(unsigned AS1, unsigned AS2) const;
 };

 } // end anonymous namespace
@@ -196,13 +212,12 @@

 // If V is an unvisited generic address expression, appends V to PostorderStack
 // and marks it as visited.
-static void appendsGenericAddressExpressionToPostorderStack(
+void NVPTXInferAddressSpaces::appendsGenericAddressExpressionToPostorderStack(
     Value *V, std::vector<std::pair<Value *, bool>> *PostorderStack,
-    DenseSet<Value *> *Visited) {
+    DenseSet<Value *> *Visited) const {
   assert(V->getType()->isPointerTy());
   if (isAddressExpression(*V) &&
-      V->getType()->getPointerAddressSpace() ==
-          AddressSpace::ADDRESS_SPACE_GENERIC) {
+      V->getType()->getPointerAddressSpace() == GenericAddrSpace) {
     if (Visited->insert(V).second)
       PostorderStack->push_back(std::make_pair(V, false));
   }
@@ -210,7 +225,8 @@

 // Returns all generic address expressions in function F. The elements are
 // ordered in postorder.
-static std::vector<Value *> collectGenericAddressExpressions(Function &F) {
+std::vector<Value *>
+NVPTXInferAddressSpaces::collectGenericAddressExpressions(Function &F) const {
   // This function implements a non-recursive postorder traversal of a partial
   // use-def graph of function F.
   std::vector<std::pair<Value *, bool>> PostorderStack;
@@ -378,14 +394,13 @@
 // expression whose address space needs to be modified, in postorder.
 //
 // See cloneInstructionWithNewAddressSpace for the meaning of UndefUsesToFix.
-static Value *
-cloneValueWithNewAddressSpace(Value *V, unsigned NewAddrSpace,
-                              const ValueToValueMapTy &ValueWithNewAddrSpace,
-                              SmallVectorImpl<const Use *> *UndefUsesToFix) {
+Value *NVPTXInferAddressSpaces::cloneValueWithNewAddressSpace(
+    Value *V, unsigned NewAddrSpace,
+    const ValueToValueMapTy &ValueWithNewAddrSpace,
+    SmallVectorImpl<const Use *> *UndefUsesToFix) const {
   // All values in Postorder are generic address expressions.
   assert(isAddressExpression(*V) &&
-         V->getType()->getPointerAddressSpace() ==
-             AddressSpace::ADDRESS_SPACE_GENERIC);
+         V->getType()->getPointerAddressSpace() == GenericAddrSpace);

   if (Instruction *I = dyn_cast<Instruction>(V)) {
     Value *NewV = cloneInstructionWithNewAddressSpace(
@@ -405,10 +420,10 @@

 // Defines the join operation on the address space lattice (see the file header
 // comments).
-static unsigned joinAddressSpaces(unsigned AS1, unsigned AS2) {
-  if (AS1 == AddressSpace::ADDRESS_SPACE_GENERIC ||
-      AS2 == AddressSpace::ADDRESS_SPACE_GENERIC)
-    return AddressSpace::ADDRESS_SPACE_GENERIC;
+unsigned NVPTXInferAddressSpaces::joinAddressSpaces(unsigned AS1,
+                                                    unsigned AS2) const {
+  if (AS1 == GenericAddrSpace || AS2 == GenericAddrSpace)
+    return GenericAddrSpace;

   if (AS1 == ADDRESS_SPACE_UNINITIALIZED)
     return AS2;
@@ -416,13 +431,18 @@
     return AS1;

   // The join of two different specific address spaces is generic.
-  return AS1 == AS2 ? AS1 : (unsigned)AddressSpace::ADDRESS_SPACE_GENERIC;
+  return (AS1 == AS2) ? AS1 : GenericAddrSpace;
 }

 bool NVPTXInferAddressSpaces::runOnFunction(Function &F) {
   if (skipFunction(F))
     return false;

+  const TargetTransformInfo &TTI = getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
+  GenericAddrSpace = TTI.getUndesirableAddressSpace();
+  if (GenericAddrSpace == ADDRESS_SPACE_UNINITIALIZED)
+    return false;
+
   // Collects all generic address expressions in postorder.
   std::vector<Value *> Postorder = collectGenericAddressExpressions(F);

@@ -473,7 +493,7 @@
       // Function updateAddressSpace moves the address space down a lattice
       // path. Therefore, nothing to do if User is already inferred as
       // generic (the bottom element in the lattice).
-      if (Pos->second == AddressSpace::ADDRESS_SPACE_GENERIC)
+      if (Pos->second == GenericAddrSpace)
         continue;

       Worklist.insert(User);
@@ -496,12 +516,12 @@
     OperandAS = PtrOperand->getType()->getPointerAddressSpace();
     NewAS = joinAddressSpaces(NewAS, OperandAS);
     // join(generic, *) = generic. So we can break if NewAS is already generic.
-    if (NewAS == AddressSpace::ADDRESS_SPACE_GENERIC)
+    if (NewAS == GenericAddrSpace)
       break;
   }

   unsigned OldAS = InferredAddrSpace.lookup(&V);
-  assert(OldAS != AddressSpace::ADDRESS_SPACE_GENERIC);
+  assert(OldAS != GenericAddrSpace);
   if (OldAS == NewAS)
     return None;
   return NewAS;
@@ -509,7 +529,7 @@

 bool NVPTXInferAddressSpaces::rewriteWithNewAddressSpaces(
     const std::vector<Value *> &Postorder,
-    const ValueToAddrSpaceMapTy &InferredAddrSpace, Function *F) {
+    const ValueToAddrSpaceMapTy &InferredAddrSpace, Function *F) const {
   // For each address expression to be modified, creates a clone of it with its
   // pointer operands converted to the new address space. Since the pointer
   // operands are converted, the clone is naturally in the new address space by
Index: lib/Target/NVPTX/NVPTXTargetTransformInfo.h
===================================================================
--- lib/Target/NVPTX/NVPTXTargetTransformInfo.h
+++ lib/Target/NVPTX/NVPTXTargetTransformInfo.h
@@ -45,6 +45,10 @@

   bool isSourceOfDivergence(const Value *V);

+  unsigned getUndesirableAddressSpace() const {
+    return AddressSpace::ADDRESS_SPACE_GENERIC;
+  }
+
   // Increase the inlining cost threshold by a factor of 5, reflecting that
   // calls are particularly expensive in NVPTX.
   unsigned getInliningThresholdMultiplier() { return 5; }
Index: test/CodeGen/NVPTX/access-non-generic.ll
===================================================================
--- test/CodeGen/NVPTX/access-non-generic.ll
+++ test/CodeGen/NVPTX/access-non-generic.ll
@@ -1,6 +1,7 @@
 ; RUN: llc < %s -march=nvptx -mcpu=sm_20 | FileCheck %s --check-prefix PTX
 ; RUN: llc < %s -march=nvptx64 -mcpu=sm_20 | FileCheck %s --check-prefix PTX
-; RUN: opt < %s -S -nvptx-infer-addrspace | FileCheck %s --check-prefix IR
+; RUN: opt -mtriple=nvptx-- < %s -S -nvptx-infer-addrspace | FileCheck %s --check-prefix IR
+; RUN: opt -mtriple=nvptx64-- < %s -S -nvptx-infer-addrspace | FileCheck %s --check-prefix IR

 @array = internal addrspace(3) global [10 x float] zeroinitializer, align 4
 @scalar = internal addrspace(3) global float 0.000000e+00, align 4
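
For reference, a minimal sketch of the kind of rewrite this pass performs once the
'generic' address space comes from getUndesirableAddressSpace(). This IR is
illustrative only and is not part of the patch; it assumes the NVPTX convention that
addrspace(3) is the shared address space and the default address space is the
'generic' (undesirable) one:

; Illustrative input: a load through a generic pointer produced by addrspacecast.
@scalar = internal addrspace(3) global float 0.000000e+00, align 4

define float @load_scalar() {
  ; %p is a generic address expression rooted at an addrspace(3) global, so the
  ; pass can infer that the access really targets addrspace(3).
  %p = addrspacecast float addrspace(3)* @scalar to float*
  %v = load float, float* %p, align 4
  ret float %v
}

; After -nvptx-infer-addrspace, the load is expected to go directly through the
; addrspace(3) pointer and the addrspacecast becomes dead, e.g.:
;   %v = load float, float addrspace(3)* @scalar, align 4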