Index: include/llvm/CodeGen/GlobalISel/InstructionSelector.h
===================================================================
--- include/llvm/CodeGen/GlobalISel/InstructionSelector.h
+++ include/llvm/CodeGen/GlobalISel/InstructionSelector.h
@@ -138,6 +138,16 @@
   /// - MMOIdx - MMO index
   /// - Size - The size in bytes of the memory access
   GIM_CheckMemorySizeEqualTo,
+
+  /// Check the address space of the memory access for the given machine memory
+  /// operand.
+  /// - InsnID - Instruction ID
+  /// - MMOIdx - MMO index
+  /// - NumAddrSpace - Number of valid address spaces
+  /// - AddrSpaceN - An allowed space of the memory access
+  /// - AddrSpaceN+1 ...
+  GIM_CheckMemoryAddressSpace,
+
   /// Check the size of the memory access for the given machine memory operand
   /// against the size of an operand.
   /// - InsnID - Instruction ID
Index: include/llvm/CodeGen/GlobalISel/InstructionSelectorImpl.h
===================================================================
--- include/llvm/CodeGen/GlobalISel/InstructionSelectorImpl.h
+++ include/llvm/CodeGen/GlobalISel/InstructionSelectorImpl.h
@@ -370,6 +370,45 @@
           return false;
       break;
     }
+    case GIM_CheckMemoryAddressSpace: {
+      int64_t InsnID = MatchTable[CurrentIdx++];
+      int64_t MMOIdx = MatchTable[CurrentIdx++];
+      // This accepts a list of possible address spaces.
+      const int NumAddrSpace = MatchTable[CurrentIdx++];
+
+      if (State.MIs[InsnID]->getNumMemOperands() <= MMOIdx) {
+        if (handleReject() == RejectAndGiveUp)
+          return false;
+        break;
+      }
+
+      // Need to still jump to the end of the list of address spaces if we find
+      // a match earlier.
+      const uint64_t LastIdx = CurrentIdx + NumAddrSpace;
+
+      const MachineMemOperand *MMO
+        = *(State.MIs[InsnID]->memoperands_begin() + MMOIdx);
+      const unsigned MMOAddrSpace = MMO->getAddrSpace();
+
+      bool Success = false;
+      for (int I = 0; I != NumAddrSpace; ++I) {
+        unsigned AddrSpace = MatchTable[CurrentIdx++];
+        DEBUG_WITH_TYPE(
+          TgtInstructionSelector::getName(),
+          dbgs() << "addrspace(" << MMOAddrSpace << ") vs "
+                 << AddrSpace << '\n');
+
+        if (AddrSpace == MMOAddrSpace) {
+          Success = true;
+          break;
+        }
+      }
+
+      CurrentIdx = LastIdx;
+      if (!Success && handleReject() == RejectAndGiveUp)
+        return false;
+      break;
+    }
     case GIM_CheckMemorySizeEqualTo: {
       int64_t InsnID = MatchTable[CurrentIdx++];
       int64_t MMOIdx = MatchTable[CurrentIdx++];
Index: include/llvm/Target/TargetSelectionDAG.td
===================================================================
--- include/llvm/Target/TargetSelectionDAG.td
+++ include/llvm/Target/TargetSelectionDAG.td
@@ -737,6 +737,10 @@
   // cast<StoreSDNode>(N)->isTruncatingStore();
   bit IsTruncStore = ?;
 
+  // cast<MemSDNode>(N)->getAddressSpace() ==
+  // If this empty, accept any address space.
+  list<int> AddressSpaces = ?;
+
   // cast<AtomicSDNode>(N)->getOrdering() == AtomicOrdering::Monotonic
   bit IsAtomicOrderingMonotonic = ?;
   // cast<AtomicSDNode>(N)->getOrdering() == AtomicOrdering::Acquire
@@ -762,6 +766,8 @@
   // cast<MemSDNode>(N)->getMemoryVT().getScalarType() == MVT::<VT>;
   // cast<MemSDNode>(N)->getMemoryVT().getScalarType() == MVT::<VT>;
   ValueType ScalarMemoryVT = ?;
+
+  // TODO: Add alignment
 }
 
 // PatFrag - A version of PatFrags matching only a single fragment.
Index: test/TableGen/address-space-patfrags.td
===================================================================
--- /dev/null
+++ test/TableGen/address-space-patfrags.td
@@ -0,0 +1,85 @@
+// RUN: llvm-tblgen -gen-dag-isel -I %p/../../include %s 2>&1 | FileCheck -check-prefix=SDAG %s
+// RUN: llvm-tblgen -gen-global-isel -optimize-match-table=false -I %p/../../include %s -o - < %s | FileCheck -check-prefix=GISEL %s
+
+include "llvm/Target/Target.td"
+
+def TestTargetInstrInfo : InstrInfo;
+
+
+def TestTarget : Target {
+  let InstructionSet = TestTargetInstrInfo;
+}
+
+def R0 : Register<"r0"> { let Namespace = "MyTarget"; }
+def GPR32 : RegisterClass<"MyTarget", [i32], 32, (add R0)>;
+
+
+// With one address space
+def pat_frag_a : PatFrag <(ops node:$ptr), (load node:$ptr), [{}]> {
+  let AddressSpaces = [ 999 ];
+  let IsLoad = 1; // FIXME: Can this be inferred?
+  let MemoryVT = i32;
+}
+
+// With multiple address spaces
+def pat_frag_b : PatFrag <(ops node:$ptr), (load node:$ptr), [{}]> {
+  let AddressSpaces = [ 123, 455 ];
+  let IsLoad = 1; // FIXME: Can this be inferred?
+  let MemoryVT = i32;
+}
+
+def inst_a : Instruction {
+  let OutOperandList = (outs GPR32:$dst);
+  let InOperandList = (ins GPR32:$src);
+}
+
+def inst_b : Instruction {
+  let OutOperandList = (outs GPR32:$dst);
+  let InOperandList = (ins GPR32:$src);
+}
+
+// SDAG: case 2: {
+// SDAG: // Predicate_pat_frag_a
+// SDAG-NEXT: SDNode *N = Node;
+// SDAG-NEXT: (void)N;
+// SDAG-NEXT: unsigned AddrSpace = cast<MemSDNode>(N)->getAddressSpace();
+
+// SDAG-NEXT: if (AddrSpace != 999)
+// SDAG-NEXT: return false;
+// SDAG-NEXT: if (cast<MemSDNode>(N)->getMemoryVT() != MVT::i32) return false;
+// SDAG-NEXT: return true;
+
+// GISEL: GIM_Try, /*On fail goto*//*Label 0*/ 47, // Rule ID 0 //
+// GISEL-NEXT: GIM_CheckNumOperands, /*MI*/0, /*Expected*/2,
+// GISEL-NEXT: GIM_CheckOpcode, /*MI*/0, TargetOpcode::G_LOAD,
+// GISEL-NEXT: GIM_CheckMemorySizeEqualToLLT, /*MI*/0, /*MMO*/0, /*OpIdx*/0,
+// GISEL-NEXT: GIM_CheckMemoryAddressSpace, /*MI*/0, /*MMO*/0, /*NumAddrSpace*/1, /*AddrSpace*/999,
+// GISEL-NEXT: GIM_CheckMemorySizeEqualTo, /*MI*/0, /*MMO*/0, /*Size*/4,
+// GISEL-NEXT: GIM_CheckAtomicOrdering, /*MI*/0, /*Order*/(int64_t)AtomicOrdering::NotAtomic,
def : Pat <
  (pat_frag_a GPR32:$src),
  (inst_a GPR32:$src)
>;
+
+// SDAG: case 3: {
+// SDAG-NEXT: // Predicate_pat_frag_b
+// SDAG-NEXT: SDNode *N = Node;
+// SDAG-NEXT: (void)N;
+// SDAG-NEXT: unsigned AddrSpace = cast<MemSDNode>(N)->getAddressSpace();
+// SDAG-NEXT: if (AddrSpace != 123 && AddrSpace != 455)
+// SDAG-NEXT: return false;
+// SDAG-NEXT: if (cast<MemSDNode>(N)->getMemoryVT() != MVT::i32) return false;
+// SDAG-NEXT: return true;
+
+
+// GISEL: GIM_Try, /*On fail goto*//*Label 1*/ 95, // Rule ID 1 //
+// GISEL-NEXT: GIM_CheckNumOperands, /*MI*/0, /*Expected*/2,
+// GISEL-NEXT: GIM_CheckOpcode, /*MI*/0, TargetOpcode::G_LOAD,
+// GISEL-NEXT: GIM_CheckMemorySizeEqualToLLT, /*MI*/0, /*MMO*/0, /*OpIdx*/0,
+// GISEL-NEXT: GIM_CheckMemoryAddressSpace, /*MI*/0, /*MMO*/0, /*NumAddrSpace*/2, /*AddrSpace*/123, /*AddrSpace*/455,
+// GISEL-NEXT: GIM_CheckMemorySizeEqualTo, /*MI*/0, /*MMO*/0, /*Size*/4,
+// GISEL-NEXT: GIM_CheckAtomicOrdering, /*MI*/0, /*Order*/(int64_t)AtomicOrdering::NotAtomic,
+def : Pat <
+  (pat_frag_b GPR32:$src),
+  (inst_b GPR32:$src)
+>;
Index: utils/TableGen/CodeGenDAGPatterns.h
===================================================================
--- utils/TableGen/CodeGenDAGPatterns.h
+++ utils/TableGen/CodeGenDAGPatterns.h
@@ -593,6 +593,8 @@
   /// ValueType record for the memory VT.
   Record *getScalarMemoryVT() const;
 
+  ListInit *getAddressSpaces() const;
+
   // If true, indicates that GlobalISel-based C++ code was supplied.
   bool hasGISelPredicateCode() const;
   std::string getGISelPredicateCode() const;
Index: utils/TableGen/CodeGenDAGPatterns.cpp
===================================================================
--- utils/TableGen/CodeGenDAGPatterns.cpp
+++ utils/TableGen/CodeGenDAGPatterns.cpp
@@ -954,13 +954,33 @@
   }
 
   if (isLoad() || isStore() || isAtomic()) {
-    StringRef SDNodeName =
-        isLoad() ? "LoadSDNode" : isStore() ? "StoreSDNode" : "AtomicSDNode";
+    if (ListInit *AddressSpaces = getAddressSpaces()) {
+      Code += "unsigned AddrSpace = cast<MemSDNode>(N)->getAddressSpace();\n"
+        " if (";
+
+      bool First = true;
+      for (Init *Val : AddressSpaces->getValues()) {
+        if (First)
+          First = false;
+        else
+          Code += " && ";
+
+        IntInit *IntVal = dyn_cast<IntInit>(Val);
+        if (!IntVal) {
+          PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(),
+                          "AddressSpaces element must be integer");
+        }
+
+        Code += "AddrSpace != " + utostr(IntVal->getValue());
+      }
+
+      Code += ")\nreturn false;\n";
+    }
 
     Record *MemoryVT = getMemoryVT();
     if (MemoryVT)
-      Code += ("if (cast<" + SDNodeName + ">(N)->getMemoryVT() != MVT::" +
+      Code += ("if (cast<MemSDNode>(N)->getMemoryVT() != MVT::" +
                MemoryVT->getName() + ") return false;\n")
                   .str();
   }
@@ -1149,6 +1169,14 @@
     return nullptr;
   return R->getValueAsDef("MemoryVT");
 }
+
+ListInit *TreePredicateFn::getAddressSpaces() const {
+  Record *R = getOrigPatFragRecord()->getRecord();
+  if (R->isValueUnset("AddressSpaces"))
+    return nullptr;
+  return R->getValueAsListInit("AddressSpaces");
+}
+
 Record *TreePredicateFn::getScalarMemoryVT() const {
   Record *R = getOrigPatFragRecord()->getRecord();
   if (R->isValueUnset("ScalarMemoryVT"))
Index: utils/TableGen/GlobalISelEmitter.cpp
===================================================================
--- utils/TableGen/GlobalISelEmitter.cpp
+++ utils/TableGen/GlobalISelEmitter.cpp
@@ -232,6 +232,27 @@
   if (Record *VT = P.getScalarMemoryVT())
     Explanation += (" ScalarVT(MemVT)=" + VT->getName()).str();
 
+  if (ListInit *AddrSpaces = P.getAddressSpaces()) {
+    bool First = true;
+    raw_string_ostream OS(Explanation);
+    OS << " AddressSpaces=[";
+
+    for (Init *Val : AddrSpaces->getValues()) {
+      IntInit *IntVal = dyn_cast<IntInit>(Val);
+      if (!IntVal)
+        continue;
+
+      if (First)
+        First = false;
+      else
+        OS << ',';
+
+      OS << IntVal->getValue();
+    }
+
+    OS << ']';
+  }
+
   if (P.isAtomicOrderingMonotonic())
     Explanation += " monotonic";
   if (P.isAtomicOrderingAcquire())
@@ -308,6 +329,12 @@
       continue;
     }
 
+    if (Predicate.isLoad() || Predicate.isStore() || Predicate.isAtomic()) {
+      const ListInit *AddrSpaces = Predicate.getAddressSpaces();
+      if (AddrSpaces && !AddrSpaces->empty())
+        continue;
+    }
+
    if (Predicate.isAtomic() && Predicate.getMemoryVT())
       continue;
 
@@ -1028,6 +1055,7 @@
     IPM_AtomicOrderingMMO,
     IPM_MemoryLLTSize,
     IPM_MemoryVsLLTSize,
+    IPM_MemoryAddressSpace,
     IPM_GenericPredicate,
     OPM_SameOperand,
     OPM_ComplexPattern,
@@ -1789,6 +1817,42 @@
   }
 };
 
+class MemoryAddressSpacePredicateMatcher : public InstructionPredicateMatcher {
+protected:
+  unsigned MMOIdx;
+  SmallVector<unsigned, 4> AddrSpaces;
+
+public:
+  MemoryAddressSpacePredicateMatcher(unsigned InsnVarID, unsigned MMOIdx,
+                                     ArrayRef<unsigned> AddrSpaces)
+      : InstructionPredicateMatcher(IPM_MemoryAddressSpace, InsnVarID),
+        MMOIdx(MMOIdx), AddrSpaces(AddrSpaces.begin(), AddrSpaces.end()) {}
+
+  static bool classof(const PredicateMatcher *P) {
+    return P->getKind() == IPM_MemoryAddressSpace;
+  }
+  bool isIdentical(const PredicateMatcher &B) const override {
+    if (!InstructionPredicateMatcher::isIdentical(B))
+      return false;
+    auto *Other = cast<MemoryAddressSpacePredicateMatcher>(&B);
+    return MMOIdx == Other->MMOIdx && AddrSpaces == Other->AddrSpaces;
+  }
+
+  void emitPredicateOpcodes(MatchTable &Table,
+                            RuleMatcher &Rule) const override {
+    Table << MatchTable::Opcode("GIM_CheckMemoryAddressSpace")
+          << MatchTable::Comment("MI") << MatchTable::IntValue(InsnVarID)
+          << MatchTable::Comment("MMO") << MatchTable::IntValue(MMOIdx)
+          // Encode number of address spaces to expect.
+          << MatchTable::Comment("NumAddrSpace")
+          << MatchTable::IntValue(AddrSpaces.size());
+    for (unsigned AS : AddrSpaces)
+      Table << MatchTable::Comment("AddrSpace") << MatchTable::IntValue(AS);
+
+    Table << MatchTable::LineBreak;
+  }
+};
+
 /// Generates code to check that the size of an MMO is less-than, equal-to, or
 /// greater than a given LLT.
 class MemoryVsLLTSizePredicateMatcher : public InstructionPredicateMatcher {
@@ -3210,7 +3274,26 @@
       continue;
     }
 
-    // G_LOAD is used for both non-extending and any-extending loads.
+    // An address space check is needed in all contexts if there is one.
+    if (Predicate.isLoad() || Predicate.isStore() || Predicate.isAtomic()) {
+      if (const ListInit *AddrSpaces = Predicate.getAddressSpaces()) {
+        SmallVector<unsigned, 4> ParsedAddrSpaces;
+
+        for (Init *Val : AddrSpaces->getValues()) {
+          IntInit *IntVal = dyn_cast<IntInit>(Val);
+          if (!IntVal)
+            return failedImport("Address space is not an integer");
+          ParsedAddrSpaces.push_back(IntVal->getValue());
+        }
+
+        if (!ParsedAddrSpaces.empty()) {
+          InsnMatcher.addPredicate<MemoryAddressSpacePredicateMatcher>(
+            0, ParsedAddrSpaces);
+        }
+      }
+    }
+
+    // G_LOAD is used for both non-extending and any-extending loads.
     if (Predicate.isLoad() && Predicate.isNonExtLoad()) {
       InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
           0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);