diff --git a/llvm/test/TableGen/ContextlessPredicates.td b/llvm/test/TableGen/ContextlessPredicates.td
new file mode 100644
--- /dev/null
+++ b/llvm/test/TableGen/ContextlessPredicates.td
@@ -0,0 +1,74 @@
+// RUN: llvm-tblgen -gen-global-isel -I %p/../../include -I %p/Common -optimize-match-table=false %s -o %T/context-non-optimized.cpp
+// RUN: FileCheck %s --check-prefixes=CHECK_NOPT -input-file=%T/context-non-optimized.cpp
+// RUN: llvm-tblgen -gen-global-isel -I %p/../../include -I %p/Common -optimize-match-table=true %s -o %T/context-optimized.cpp
+// RUN: FileCheck %s --check-prefixes=CHECK_OPT -input-file=%T/context-optimized.cpp
+
+
+
+include "llvm/Target/Target.td"
+include "GlobalISelEmitterCommon.td"
+
+def test_atomic_op_frag : PatFrag<(ops node:$ptr, node:$val),
+                                  (atomic_swap node:$ptr, node:$val)> {
+  let GISelPredicateCode = [{ return !MRI.use_nodbg_empty(MI.getOperand(0).getReg()); }];
+  let IsAtomic = 1;
+  let MemoryVT = i32;
+}
+
+def INSN : I<(outs GPR32:$dst), (ins GPR32Op:$src1, GPR32Op:$src2), []>;
+
+def : Pat<(test_atomic_op_frag GPR32:$ptr, GPR32:$val),
+          (INSN GPR32:$ptr, GPR32:$val)>;
+
+// CHECK_NOPT-LABEL: const int64_t *MyTargetInstructionSelector::getMatchTable() const {
+// CHECK_NOPT-NEXT: constexpr static int64_t MatchTable0[] = {
+// CHECK_NOPT-NEXT: GIM_Try, /*On fail goto*//*Label 0*/ 46, // Rule ID 0 //
+// CHECK_NOPT-NEXT: GIM_CheckNumOperands, /*MI*/0, /*Expected*/3,
+// CHECK_NOPT-NEXT: GIM_CheckOpcode, /*MI*/0, TargetOpcode::G_ATOMICRMW_XCHG,
+// CHECK_NOPT-NEXT: GIM_CheckMemorySizeEqualTo, /*MI*/0, /*MMO*/0, /*Size*/4,
+// CHECK_NOPT-NEXT: // MIs[0] dst
+// CHECK_NOPT-NEXT: GIM_CheckType, /*MI*/0, /*Op*/0, /*Type*/GILLT_s32,
+// CHECK_NOPT-NEXT: GIM_CheckRegBankForClass, /*MI*/0, /*Op*/0, /*RC*/MyTarget::GPR32RegClassID,
+// CHECK_NOPT-NEXT: // MIs[0] ptr
+// CHECK_NOPT-NEXT: GIM_CheckPointerToAny, /*MI*/0, /*Op*/1, /*SizeInBits*/32,
+// CHECK_NOPT-NEXT: GIM_CheckRegBankForClass, /*MI*/0, /*Op*/1, /*RC*/MyTarget::GPR32RegClassID,
+// CHECK_NOPT-NEXT: // MIs[0] val
+// CHECK_NOPT-NEXT: GIM_CheckType, /*MI*/0, /*Op*/2, /*Type*/GILLT_s32,
+// CHECK_NOPT-NEXT: GIM_CheckRegBankForClass, /*MI*/0, /*Op*/2, /*RC*/MyTarget::GPR32RegClassID,
+// CHECK_NOPT-NEXT: GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIPFP_MI_Predicate_test_atomic_op_frag,
+// CHECK_NOPT-NEXT: // (atomic_swap:{ *:[i32] } GPR32:{ *:[i32] }:$ptr, GPR32:{ *:[i32] }:$val)<<P:Predicate_test_atomic_op_frag>> => (INSN:{ *:[i32] } GPR32:{ *:[i32] }:$ptr, GPR32:{ *:[i32] }:$val)
+// CHECK_NOPT-NEXT: GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/MyTarget::INSN,
+// CHECK_NOPT-NEXT: GIR_ConstrainSelectedInstOperands, /*InsnID*/0,
+// CHECK_NOPT-NEXT: // GIR_Coverage, 0,
+// CHECK_NOPT-NEXT: GIR_Done,
+// CHECK_NOPT-NEXT: // Label 0: @46
+// CHECK_NOPT-NEXT: GIM_Reject,
+// CHECK_NOPT-NEXT: };
+// CHECK_NOPT-NEXT: return MatchTable0;
+// CHECK_NOPT-NEXT: }
+//
+//
+
+// CHECK_OPT-LABEL: const int64_t *MyTargetInstructionSelector::getMatchTable() const {
+// CHECK_OPT-NEXT: constexpr static int64_t MatchTable0[] = {
+// CHECK_OPT-NEXT: GIM_Try, /*On fail goto*//*Label 0*/ 43, // Rule ID 0 //
+// CHECK_OPT-NEXT: GIM_CheckOpcode, /*MI*/0, TargetOpcode::G_ATOMICRMW_XCHG,
+// CHECK_OPT-NEXT: GIM_CheckType, /*MI*/0, /*Op*/0, /*Type*/GILLT_s32,
+// CHECK_OPT-NEXT: GIM_CheckType, /*MI*/0, /*Op*/2, /*Type*/GILLT_s32,
+// CHECK_OPT-NEXT: GIM_CheckMemorySizeEqualTo, /*MI*/0, /*MMO*/0, /*Size*/4,
+// CHECK_OPT-NEXT: GIM_CheckRegBankForClass, /*MI*/0, /*Op*/0, /*RC*/MyTarget::GPR32RegClassID,
+// CHECK_OPT-NEXT: // MIs[0] ptr
+// CHECK_OPT-NEXT: GIM_CheckPointerToAny, /*MI*/0, /*Op*/1, /*SizeInBits*/32,
+// CHECK_OPT-NEXT: GIM_CheckRegBankForClass, /*MI*/0, /*Op*/1, /*RC*/MyTarget::GPR32RegClassID,
+// CHECK_OPT-NEXT: GIM_CheckRegBankForClass, /*MI*/0, /*Op*/2, /*RC*/MyTarget::GPR32RegClassID,
+// CHECK_OPT-NEXT: GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIPFP_MI_Predicate_test_atomic_op_frag,
+// CHECK_OPT-NEXT: // (atomic_swap:{ *:[i32] } GPR32:{ *:[i32] }:$ptr, GPR32:{ *:[i32] }:$val)<<P:Predicate_test_atomic_op_frag>> => (INSN:{ *:[i32] } GPR32:{ *:[i32] }:$ptr, GPR32:{ *:[i32] }:$val)
+// CHECK_OPT-NEXT: GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/MyTarget::INSN,
+// CHECK_OPT-NEXT: GIR_ConstrainSelectedInstOperands, /*InsnID*/0,
+// CHECK_OPT-NEXT: // GIR_Coverage, 0,
+// CHECK_OPT-NEXT: GIR_Done,
+// CHECK_OPT-NEXT: // Label 0: @43
+// CHECK_OPT-NEXT: GIM_Reject,
+// CHECK_OPT-NEXT: };
+// CHECK_OPT-NEXT: return MatchTable0;
+// CHECK_OPT-NEXT: }
diff --git a/llvm/utils/TableGen/GlobalISelEmitter.cpp b/llvm/utils/TableGen/GlobalISelEmitter.cpp
--- a/llvm/utils/TableGen/GlobalISelEmitter.cpp
+++ b/llvm/utils/TableGen/GlobalISelEmitter.cpp
@@ -3504,6 +3504,12 @@
   Optional<const CodeGenRegisterClass *>
   inferRegClassFromPattern(TreePatternNode *N);
 
+  // Add builtin predicates.
+  Expected<InstructionMatcher &>
+  addBuiltinPredicates(const Record *SrcGIEquivOrNull,
+                       const TreePredicateFn &Predicate,
+                       InstructionMatcher &InsnMatcher, bool &HasAddedMatcher);
+
 public:
   /// Takes a sequence of \p Rules and group them based on the predicates
   /// they share. \p MatcherStorage is used as a memory container
@@ -3611,6 +3617,147 @@
   return Error::success();
 }
 
+Expected<InstructionMatcher &> GlobalISelEmitter::addBuiltinPredicates(
+    const Record *SrcGIEquivOrNull, const TreePredicateFn &Predicate,
+    InstructionMatcher &InsnMatcher, bool &HasAddedMatcher) {
+  if (Predicate.isLoad() || Predicate.isStore() || Predicate.isAtomic()) {
+    if (const ListInit *AddrSpaces = Predicate.getAddressSpaces()) {
+      SmallVector<unsigned, 4> ParsedAddrSpaces;
+
+      for (Init *Val : AddrSpaces->getValues()) {
+        IntInit *IntVal = dyn_cast<IntInit>(Val);
+        if (!IntVal)
+          return failedImport("Address space is not an integer");
+        ParsedAddrSpaces.push_back(IntVal->getValue());
+      }
+
+      if (!ParsedAddrSpaces.empty()) {
+        InsnMatcher.addPredicate<MemoryAddressSpacePredicateMatcher>(
+            0, ParsedAddrSpaces);
+      }
+    }
+
+    int64_t MinAlign = Predicate.getMinAlignment();
+    if (MinAlign > 0)
+      InsnMatcher.addPredicate<MemoryAlignmentPredicateMatcher>(0, MinAlign);
+  }
+
+  // G_LOAD is used for both non-extending and any-extending loads.
+  if (Predicate.isLoad() && Predicate.isNonExtLoad()) {
+    InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+        0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
+    return InsnMatcher;
+  }
+  if (Predicate.isLoad() && Predicate.isAnyExtLoad()) {
+    InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+        0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
+    return InsnMatcher;
+  }
+
+  if (Predicate.isStore()) {
+    if (Predicate.isTruncStore()) {
+      // FIXME: If MemoryVT is set, we end up with 2 checks for the MMO size.
+      InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+          0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
+      return InsnMatcher;
+    }
+    if (Predicate.isNonTruncStore()) {
+      // We need to check the sizes match here otherwise we could incorrectly
+      // match truncating stores with non-truncating ones.
+      InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
+          0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
+    }
+  }
+
+  // No check required. We already did it by swapping the opcode.
+  if (!SrcGIEquivOrNull->isValueUnset("IfSignExtend") &&
+      Predicate.isSignExtLoad())
+    return InsnMatcher;
+
+  // No check required. We already did it by swapping the opcode.
+  if (!SrcGIEquivOrNull->isValueUnset("IfZeroExtend") &&
+      Predicate.isZeroExtLoad())
+    return InsnMatcher;
+
+  // No check required. G_STORE by itself is a non-extending store.
+  if (Predicate.isNonTruncStore())
+    return InsnMatcher;
+
+  if (Predicate.isLoad() || Predicate.isStore() || Predicate.isAtomic()) {
+    if (Predicate.getMemoryVT() != nullptr) {
+      Optional<LLTCodeGen> MemTyOrNone =
+          MVTToLLT(getValueType(Predicate.getMemoryVT()));
+
+      if (!MemTyOrNone)
+        return failedImport("MemVT could not be converted to LLT");
+
+      // MMO's work in bytes so we must take care of unusual types like i1
+      // don't round down.
+      unsigned MemSizeInBits =
+          llvm::alignTo(MemTyOrNone->get().getSizeInBits(), 8);
+
+      InsnMatcher.addPredicate<MemorySizePredicateMatcher>(0,
+                                                           MemSizeInBits / 8);
+      return InsnMatcher;
+    }
+  }
+
+  if (Predicate.isLoad() || Predicate.isStore()) {
+    // No check required. A G_LOAD/G_STORE is an unindexed load.
+    if (Predicate.isUnindexed())
+      return InsnMatcher;
+  }
+
+  if (Predicate.isAtomic()) {
+    if (Predicate.isAtomicOrderingMonotonic()) {
+      InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>("Monotonic");
+      return InsnMatcher;
+    }
+    if (Predicate.isAtomicOrderingAcquire()) {
+      InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>("Acquire");
+      return InsnMatcher;
+    }
+    if (Predicate.isAtomicOrderingRelease()) {
+      InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>("Release");
+      return InsnMatcher;
+    }
+    if (Predicate.isAtomicOrderingAcquireRelease()) {
+      InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
+          "AcquireRelease");
+      return InsnMatcher;
+    }
+    if (Predicate.isAtomicOrderingSequentiallyConsistent()) {
+      InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
+          "SequentiallyConsistent");
+      return InsnMatcher;
+    }
+  }
+
+  if (Predicate.isAtomicOrderingAcquireOrStronger()) {
+    InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
+        "Acquire", AtomicOrderingMMOPredicateMatcher::AO_OrStronger);
+    return InsnMatcher;
+  }
+  if (Predicate.isAtomicOrderingWeakerThanAcquire()) {
+    InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
+        "Acquire", AtomicOrderingMMOPredicateMatcher::AO_WeakerThan);
+    return InsnMatcher;
+  }
+
+  if (Predicate.isAtomicOrderingReleaseOrStronger()) {
+    InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
+        "Release", AtomicOrderingMMOPredicateMatcher::AO_OrStronger);
+    return InsnMatcher;
+  }
+  if (Predicate.isAtomicOrderingWeakerThanRelease()) {
+    InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
+        "Release", AtomicOrderingMMOPredicateMatcher::AO_WeakerThan);
+    return InsnMatcher;
+  }
+  HasAddedMatcher = false;
+  return InsnMatcher;
+}
+
 Expected<InstructionMatcher &> GlobalISelEmitter::createAndImportSelDAGMatcher(
     RuleMatcher &Rule, InstructionMatcher &InsnMatcher,
     const TreePatternNode *Src, unsigned &TempOpIdx) {
@@ -3652,6 +3799,7 @@
 
   for (const TreePredicateCall &Call : Src->getPredicateCalls()) {
     const TreePredicateFn &Predicate = Call.Fn;
+    bool HasAddedBuiltinMatcher = true;
     if (Predicate.isAlwaysTrue())
       continue;
 
@@ -3660,150 +3808,19 @@
       continue;
     }
 
-    // An address space check is needed in all contexts if there is one.
-    if (Predicate.isLoad() || Predicate.isStore() || Predicate.isAtomic()) {
-      if (const ListInit *AddrSpaces = Predicate.getAddressSpaces()) {
-        SmallVector<unsigned, 4> ParsedAddrSpaces;
-
-        for (Init *Val : AddrSpaces->getValues()) {
-          IntInit *IntVal = dyn_cast<IntInit>(Val);
-          if (!IntVal)
-            return failedImport("Address space is not an integer");
-          ParsedAddrSpaces.push_back(IntVal->getValue());
-        }
-
-        if (!ParsedAddrSpaces.empty()) {
-          InsnMatcher.addPredicate<MemoryAddressSpacePredicateMatcher>(
-              0, ParsedAddrSpaces);
-        }
-      }
-
-      int64_t MinAlign = Predicate.getMinAlignment();
-      if (MinAlign > 0)
-        InsnMatcher.addPredicate<MemoryAlignmentPredicateMatcher>(0, MinAlign);
-    }
-
-    // G_LOAD is used for both non-extending and any-extending loads.
-    if (Predicate.isLoad() && Predicate.isNonExtLoad()) {
-      InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
-          0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
-      continue;
-    }
-    if (Predicate.isLoad() && Predicate.isAnyExtLoad()) {
-      InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
-          0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
-      continue;
-    }
-
-    if (Predicate.isStore()) {
-      if (Predicate.isTruncStore()) {
-        // FIXME: If MemoryVT is set, we end up with 2 checks for the MMO size.
-        InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
-            0, MemoryVsLLTSizePredicateMatcher::LessThan, 0);
-        continue;
-      }
-      if (Predicate.isNonTruncStore()) {
-        // We need to check the sizes match here otherwise we could incorrectly
-        // match truncating stores with non-truncating ones.
-        InsnMatcher.addPredicate<MemoryVsLLTSizePredicateMatcher>(
-            0, MemoryVsLLTSizePredicateMatcher::EqualTo, 0);
-      }
-    }
-
-    // No check required. We already did it by swapping the opcode.
-    if (!SrcGIEquivOrNull->isValueUnset("IfSignExtend") &&
-        Predicate.isSignExtLoad())
-      continue;
-
-    // No check required. We already did it by swapping the opcode.
-    if (!SrcGIEquivOrNull->isValueUnset("IfZeroExtend") &&
-        Predicate.isZeroExtLoad())
-      continue;
-
-    // No check required. G_STORE by itself is a non-extending store.
-    if (Predicate.isNonTruncStore())
-      continue;
-
-    if (Predicate.isLoad() || Predicate.isStore() || Predicate.isAtomic()) {
-      if (Predicate.getMemoryVT() != nullptr) {
-        Optional<LLTCodeGen> MemTyOrNone =
-            MVTToLLT(getValueType(Predicate.getMemoryVT()));
-
-        if (!MemTyOrNone)
-          return failedImport("MemVT could not be converted to LLT");
-
-        // MMO's work in bytes so we must take care of unusual types like i1
-        // don't round down.
-        unsigned MemSizeInBits =
-            llvm::alignTo(MemTyOrNone->get().getSizeInBits(), 8);
-
-        InsnMatcher.addPredicate<MemorySizePredicateMatcher>(
-            0, MemSizeInBits / 8);
-        continue;
-      }
-    }
-
-    if (Predicate.isLoad() || Predicate.isStore()) {
-      // No check required. A G_LOAD/G_STORE is an unindexed load.
-      if (Predicate.isUnindexed())
-        continue;
-    }
-
-    if (Predicate.isAtomic()) {
-      if (Predicate.isAtomicOrderingMonotonic()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "Monotonic");
-        continue;
-      }
-      if (Predicate.isAtomicOrderingAcquire()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>("Acquire");
-        continue;
-      }
-      if (Predicate.isAtomicOrderingRelease()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>("Release");
-        continue;
-      }
-      if (Predicate.isAtomicOrderingAcquireRelease()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "AcquireRelease");
-        continue;
-      }
-      if (Predicate.isAtomicOrderingSequentiallyConsistent()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "SequentiallyConsistent");
-        continue;
-      }
-
-      if (Predicate.isAtomicOrderingAcquireOrStronger()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "Acquire", AtomicOrderingMMOPredicateMatcher::AO_OrStronger);
-        continue;
-      }
-      if (Predicate.isAtomicOrderingWeakerThanAcquire()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "Acquire", AtomicOrderingMMOPredicateMatcher::AO_WeakerThan);
-        continue;
-      }
-
-      if (Predicate.isAtomicOrderingReleaseOrStronger()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "Release", AtomicOrderingMMOPredicateMatcher::AO_OrStronger);
-        continue;
-      }
-      if (Predicate.isAtomicOrderingWeakerThanRelease()) {
-        InsnMatcher.addPredicate<AtomicOrderingMMOPredicateMatcher>(
-            "Release", AtomicOrderingMMOPredicateMatcher::AO_WeakerThan);
-        continue;
-      }
-    }
+    auto InsnMatcherOrError = addBuiltinPredicates(
+        SrcGIEquivOrNull, Predicate, InsnMatcher, HasAddedBuiltinMatcher);
+    if (auto Error = InsnMatcherOrError.takeError())
+      return std::move(Error);
 
     if (Predicate.hasGISelPredicateCode()) {
       InsnMatcher.addPredicate<GenericInstructionPredicateMatcher>(Predicate);
       continue;
+    }
+    if (!HasAddedBuiltinMatcher) {
+      return failedImport("Src pattern child has predicate (" +
+                          explainPredicates(Src) + ")");
     }
-
-    return failedImport("Src pattern child has predicate (" +
-                        explainPredicates(Src) + ")");
   }
 
   bool IsAtomic = false;