Index: include/llvm/Target/TargetSelectionDAG.td =================================================================== --- include/llvm/Target/TargetSelectionDAG.td +++ include/llvm/Target/TargetSelectionDAG.td @@ -649,6 +649,38 @@ code PredicateCode = pred; code ImmediateCode = [{}]; SDNodeXForm OperandTransform = xform; + + // Define a few pre-packaged predicates. This helps GlobalISel import + // existing rules from SelectionDAG for many common cases. + // They will be tested prior to the code in pred and must not be used in + // ImmLeaf and its subclasses. + + // cast<LoadSDNode>(N)->getAddressingMode() == ISD::UNINDEXED; + bit IsUnindexedLoad = ?; + // cast<StoreSDNode>(N)->getAddressingMode() == ISD::UNINDEXED; + bit IsUnindexedStore = ?; + + // cast<LoadSDNode>(N)->getExtensionType() != ISD::NON_EXTLOAD + bit IsNonExtLoad = ?; + // cast<LoadSDNode>(N)->getExtensionType() == ISD::EXTLOAD; + bit IsAnyExtLoad = ?; + // cast<LoadSDNode>(N)->getExtensionType() == ISD::SEXTLOAD; + bit IsSignExtLoad = ?; + // cast<LoadSDNode>(N)->getExtensionType() == ISD::ZEXTLOAD; + bit IsZeroExtLoad = ?; + // !cast<StoreSDNode>(N)->isTruncatingStore(); + bit IsNonTruncStore = ?; + // cast<StoreSDNode>(N)->isTruncatingStore(); + bit IsTruncStore = ?; + + // cast<LoadSDNode>(N)->getMemoryVT() == MVT::<VT>; + ValueType LoadMemoryVT = ?; + // cast<LoadSDNode>(N)->getMemoryVT().getScalarType() == MVT::<VT>; + ValueType LoadScalarMemoryVT = ?; + // cast<StoreSDNode>(N)->getMemoryVT() == MVT::<VT>; + ValueType StoreMemoryVT = ?; + // cast<StoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::<VT>; + ValueType StoreScalarMemoryVT = ?; } // OutPatFrag is a pattern fragment that is used as part of an output pattern @@ -731,170 +763,170 @@ def null_frag : SDPatternOperator; // load fragments. 
-def unindexedload : PatFrag<(ops node:$ptr), (ld node:$ptr), [{ - return cast(N)->getAddressingMode() == ISD::UNINDEXED; -}]>; -def load : PatFrag<(ops node:$ptr), (unindexedload node:$ptr), [{ - return cast(N)->getExtensionType() == ISD::NON_EXTLOAD; -}]>; +def unindexedload : PatFrag<(ops node:$ptr), (ld node:$ptr)> { + let IsUnindexedLoad = 1; +} +def load : PatFrag<(ops node:$ptr), (unindexedload node:$ptr)> { + let IsNonExtLoad = 1; +} // extending load fragments. -def extload : PatFrag<(ops node:$ptr), (unindexedload node:$ptr), [{ - return cast(N)->getExtensionType() == ISD::EXTLOAD; -}]>; -def sextload : PatFrag<(ops node:$ptr), (unindexedload node:$ptr), [{ - return cast(N)->getExtensionType() == ISD::SEXTLOAD; -}]>; -def zextload : PatFrag<(ops node:$ptr), (unindexedload node:$ptr), [{ - return cast(N)->getExtensionType() == ISD::ZEXTLOAD; -}]>; +def extload : PatFrag<(ops node:$ptr), (unindexedload node:$ptr)> { + let IsAnyExtLoad = 1; +} +def sextload : PatFrag<(ops node:$ptr), (unindexedload node:$ptr)> { + let IsSignExtLoad = 1; +} +def zextload : PatFrag<(ops node:$ptr), (unindexedload node:$ptr)> { + let IsZeroExtLoad = 1; +} -def extloadi1 : PatFrag<(ops node:$ptr), (extload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i1; -}]>; -def extloadi8 : PatFrag<(ops node:$ptr), (extload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i8; -}]>; -def extloadi16 : PatFrag<(ops node:$ptr), (extload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i16; -}]>; -def extloadi32 : PatFrag<(ops node:$ptr), (extload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i32; -}]>; -def extloadf32 : PatFrag<(ops node:$ptr), (extload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::f32; -}]>; -def extloadf64 : PatFrag<(ops node:$ptr), (extload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::f64; -}]>; +def extloadi1 : PatFrag<(ops node:$ptr), (extload node:$ptr)> { + let LoadMemoryVT = i1; +} +def extloadi8 : PatFrag<(ops node:$ptr), 
(extload node:$ptr)> { + let LoadMemoryVT = i8; +} +def extloadi16 : PatFrag<(ops node:$ptr), (extload node:$ptr)> { + let LoadMemoryVT = i16; +} +def extloadi32 : PatFrag<(ops node:$ptr), (extload node:$ptr)> { + let LoadMemoryVT = i32; +} +def extloadf32 : PatFrag<(ops node:$ptr), (extload node:$ptr)> { + let LoadMemoryVT = f32; +} +def extloadf64 : PatFrag<(ops node:$ptr), (extload node:$ptr)> { + let LoadMemoryVT = f64; +} -def sextloadi1 : PatFrag<(ops node:$ptr), (sextload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i1; -}]>; -def sextloadi8 : PatFrag<(ops node:$ptr), (sextload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i8; -}]>; -def sextloadi16 : PatFrag<(ops node:$ptr), (sextload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i16; -}]>; -def sextloadi32 : PatFrag<(ops node:$ptr), (sextload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i32; -}]>; +def sextloadi1 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> { + let LoadMemoryVT = i1; +} +def sextloadi8 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> { + let LoadMemoryVT = i8; +} +def sextloadi16 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> { + let LoadMemoryVT = i16; +} +def sextloadi32 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> { + let LoadMemoryVT = i32; +} -def zextloadi1 : PatFrag<(ops node:$ptr), (zextload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i1; -}]>; -def zextloadi8 : PatFrag<(ops node:$ptr), (zextload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i8; -}]>; -def zextloadi16 : PatFrag<(ops node:$ptr), (zextload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i16; -}]>; -def zextloadi32 : PatFrag<(ops node:$ptr), (zextload node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i32; -}]>; +def zextloadi1 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> { + let LoadMemoryVT = i1; +} +def zextloadi8 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> { + let LoadMemoryVT = i8; +} +def zextloadi16 : PatFrag<(ops 
node:$ptr), (zextload node:$ptr)> { + let LoadMemoryVT = i16; +} +def zextloadi32 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> { + let LoadMemoryVT = i32; +} -def extloadvi1 : PatFrag<(ops node:$ptr), (extload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i1; -}]>; -def extloadvi8 : PatFrag<(ops node:$ptr), (extload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i8; -}]>; -def extloadvi16 : PatFrag<(ops node:$ptr), (extload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i16; -}]>; -def extloadvi32 : PatFrag<(ops node:$ptr), (extload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i32; -}]>; -def extloadvf32 : PatFrag<(ops node:$ptr), (extload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::f32; -}]>; -def extloadvf64 : PatFrag<(ops node:$ptr), (extload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::f64; -}]>; +def extloadvi1 : PatFrag<(ops node:$ptr), (extload node:$ptr)> { + let LoadScalarMemoryVT = i1; +} +def extloadvi8 : PatFrag<(ops node:$ptr), (extload node:$ptr)> { + let LoadScalarMemoryVT = i8; +} +def extloadvi16 : PatFrag<(ops node:$ptr), (extload node:$ptr)> { + let LoadScalarMemoryVT = i16; +} +def extloadvi32 : PatFrag<(ops node:$ptr), (extload node:$ptr)> { + let LoadScalarMemoryVT = i32; +} +def extloadvf32 : PatFrag<(ops node:$ptr), (extload node:$ptr)> { + let LoadScalarMemoryVT = f32; +} +def extloadvf64 : PatFrag<(ops node:$ptr), (extload node:$ptr)> { + let LoadScalarMemoryVT = f64; +} -def sextloadvi1 : PatFrag<(ops node:$ptr), (sextload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i1; -}]>; -def sextloadvi8 : PatFrag<(ops node:$ptr), (sextload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i8; -}]>; -def sextloadvi16 : PatFrag<(ops node:$ptr), (sextload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i16; -}]>; -def 
sextloadvi32 : PatFrag<(ops node:$ptr), (sextload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i32; -}]>; +def sextloadvi1 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> { + let LoadScalarMemoryVT = i1; +} +def sextloadvi8 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> { + let LoadScalarMemoryVT = i8; +} +def sextloadvi16 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> { + let LoadScalarMemoryVT = i16; +} +def sextloadvi32 : PatFrag<(ops node:$ptr), (sextload node:$ptr)> { + let LoadScalarMemoryVT = i32; +} -def zextloadvi1 : PatFrag<(ops node:$ptr), (zextload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i1; -}]>; -def zextloadvi8 : PatFrag<(ops node:$ptr), (zextload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i8; -}]>; -def zextloadvi16 : PatFrag<(ops node:$ptr), (zextload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i16; -}]>; -def zextloadvi32 : PatFrag<(ops node:$ptr), (zextload node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i32; -}]>; +def zextloadvi1 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> { + let LoadScalarMemoryVT = i1; +} +def zextloadvi8 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> { + let LoadScalarMemoryVT = i8; +} +def zextloadvi16 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> { + let LoadScalarMemoryVT = i16; +} +def zextloadvi32 : PatFrag<(ops node:$ptr), (zextload node:$ptr)> { + let LoadScalarMemoryVT = i32; +} // store fragments. def unindexedstore : PatFrag<(ops node:$val, node:$ptr), - (st node:$val, node:$ptr), [{ - return cast(N)->getAddressingMode() == ISD::UNINDEXED; -}]>; + (st node:$val, node:$ptr)> { + let IsUnindexedStore = 1; +} def store : PatFrag<(ops node:$val, node:$ptr), - (unindexedstore node:$val, node:$ptr), [{ - return !cast(N)->isTruncatingStore(); -}]>; + (unindexedstore node:$val, node:$ptr)> { + let IsNonTruncStore = 1; +} // truncstore fragments. 
def truncstore : PatFrag<(ops node:$val, node:$ptr), - (unindexedstore node:$val, node:$ptr), [{ - return cast(N)->isTruncatingStore(); -}]>; + (unindexedstore node:$val, node:$ptr)> { + let IsTruncStore = 1; +} def truncstorei8 : PatFrag<(ops node:$val, node:$ptr), - (truncstore node:$val, node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i8; -}]>; + (truncstore node:$val, node:$ptr)> { + let StoreMemoryVT = i8; +} def truncstorei16 : PatFrag<(ops node:$val, node:$ptr), - (truncstore node:$val, node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i16; -}]>; + (truncstore node:$val, node:$ptr)> { + let StoreMemoryVT = i16; +} def truncstorei32 : PatFrag<(ops node:$val, node:$ptr), - (truncstore node:$val, node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::i32; -}]>; + (truncstore node:$val, node:$ptr)> { + let StoreMemoryVT = i32; +} def truncstoref32 : PatFrag<(ops node:$val, node:$ptr), - (truncstore node:$val, node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::f32; -}]>; + (truncstore node:$val, node:$ptr)> { + let StoreMemoryVT = f32; +} def truncstoref64 : PatFrag<(ops node:$val, node:$ptr), - (truncstore node:$val, node:$ptr), [{ - return cast(N)->getMemoryVT() == MVT::f64; -}]>; + (truncstore node:$val, node:$ptr)> { + let StoreMemoryVT = f64; +} def truncstorevi8 : PatFrag<(ops node:$val, node:$ptr), - (truncstore node:$val, node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i8; -}]>; + (truncstore node:$val, node:$ptr)> { + let StoreScalarMemoryVT = i8; +} def truncstorevi16 : PatFrag<(ops node:$val, node:$ptr), - (truncstore node:$val, node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i16; -}]>; + (truncstore node:$val, node:$ptr)> { + let StoreScalarMemoryVT = i16; +} def truncstorevi32 : PatFrag<(ops node:$val, node:$ptr), - (truncstore node:$val, node:$ptr), [{ - return cast(N)->getMemoryVT().getScalarType() == MVT::i32; -}]>; + (truncstore node:$val, node:$ptr)> { + let StoreScalarMemoryVT = i32; 
+} // indexed store fragments. def istore : PatFrag<(ops node:$val, node:$base, node:$offset), - (ist node:$val, node:$base, node:$offset), [{ - return !cast(N)->isTruncatingStore(); -}]>; + (ist node:$val, node:$base, node:$offset)> { + let IsNonTruncStore = 1; +} def pre_store : PatFrag<(ops node:$val, node:$base, node:$offset), (istore node:$val, node:$base, node:$offset), [{ @@ -903,34 +935,34 @@ }]>; def itruncstore : PatFrag<(ops node:$val, node:$base, node:$offset), - (ist node:$val, node:$base, node:$offset), [{ - return cast(N)->isTruncatingStore(); -}]>; + (ist node:$val, node:$base, node:$offset)> { + let IsTruncStore = 1; +} def pre_truncst : PatFrag<(ops node:$val, node:$base, node:$offset), (itruncstore node:$val, node:$base, node:$offset), [{ ISD::MemIndexedMode AM = cast(N)->getAddressingMode(); return AM == ISD::PRE_INC || AM == ISD::PRE_DEC; }]>; def pre_truncsti1 : PatFrag<(ops node:$val, node:$base, node:$offset), - (pre_truncst node:$val, node:$base, node:$offset), [{ - return cast(N)->getMemoryVT() == MVT::i1; -}]>; + (pre_truncst node:$val, node:$base, node:$offset)> { + let StoreMemoryVT = i1; +} def pre_truncsti8 : PatFrag<(ops node:$val, node:$base, node:$offset), - (pre_truncst node:$val, node:$base, node:$offset), [{ - return cast(N)->getMemoryVT() == MVT::i8; -}]>; + (pre_truncst node:$val, node:$base, node:$offset)> { + let StoreMemoryVT = i8; +} def pre_truncsti16 : PatFrag<(ops node:$val, node:$base, node:$offset), - (pre_truncst node:$val, node:$base, node:$offset), [{ - return cast(N)->getMemoryVT() == MVT::i16; -}]>; + (pre_truncst node:$val, node:$base, node:$offset)> { + let StoreMemoryVT = i16; +} def pre_truncsti32 : PatFrag<(ops node:$val, node:$base, node:$offset), - (pre_truncst node:$val, node:$base, node:$offset), [{ - return cast(N)->getMemoryVT() == MVT::i32; -}]>; + (pre_truncst node:$val, node:$base, node:$offset)> { + let StoreMemoryVT = i32; +} def pre_truncstf32 : PatFrag<(ops node:$val, node:$base, node:$offset), 
- (pre_truncst node:$val, node:$base, node:$offset), [{ - return cast(N)->getMemoryVT() == MVT::f32; -}]>; + (pre_truncst node:$val, node:$base, node:$offset)> { + let StoreMemoryVT = f32; +} def post_store : PatFrag<(ops node:$val, node:$ptr, node:$offset), (istore node:$val, node:$ptr, node:$offset), [{ @@ -944,25 +976,25 @@ return AM == ISD::POST_INC || AM == ISD::POST_DEC; }]>; def post_truncsti1 : PatFrag<(ops node:$val, node:$base, node:$offset), - (post_truncst node:$val, node:$base, node:$offset), [{ - return cast(N)->getMemoryVT() == MVT::i1; -}]>; + (post_truncst node:$val, node:$base, node:$offset)> { + let StoreMemoryVT = i1; +} def post_truncsti8 : PatFrag<(ops node:$val, node:$base, node:$offset), - (post_truncst node:$val, node:$base, node:$offset), [{ - return cast(N)->getMemoryVT() == MVT::i8; -}]>; + (post_truncst node:$val, node:$base, node:$offset)> { + let StoreMemoryVT = i8; +} def post_truncsti16 : PatFrag<(ops node:$val, node:$base, node:$offset), - (post_truncst node:$val, node:$base, node:$offset), [{ - return cast(N)->getMemoryVT() == MVT::i16; -}]>; + (post_truncst node:$val, node:$base, node:$offset)> { + let StoreMemoryVT = i16; +} def post_truncsti32 : PatFrag<(ops node:$val, node:$base, node:$offset), - (post_truncst node:$val, node:$base, node:$offset), [{ - return cast(N)->getMemoryVT() == MVT::i32; -}]>; + (post_truncst node:$val, node:$base, node:$offset)> { + let StoreMemoryVT = i32; +} def post_truncstf32 : PatFrag<(ops node:$val, node:$base, node:$offset), - (post_truncst node:$val, node:$base, node:$offset), [{ - return cast(N)->getMemoryVT() == MVT::f32; -}]>; + (post_truncst node:$val, node:$base, node:$offset)> { + let StoreMemoryVT = f32; +} // nontemporal store fragments. 
def nontemporalstore : PatFrag<(ops node:$val, node:$ptr), Index: utils/TableGen/CodeGenDAGPatterns.h =================================================================== --- utils/TableGen/CodeGenDAGPatterns.h +++ utils/TableGen/CodeGenDAGPatterns.h @@ -320,6 +320,38 @@ /// Get the opcode used to check this predicate. std::string getGlobalISelMatchOpcode() const; + /// Is this predicate the predefined unindexed load predicate? + bool isUnindexedLoad() const; + /// Is this predicate the predefined unindexed store predicate? + bool isUnindexedStore() const; + /// Is this predicate the predefined non-extending load predicate? + bool isNonExtLoad() const; + /// Is this predicate the predefined any-extend load predicate? + bool isAnyExtLoad() const; + /// Is this predicate the predefined sign-extend load predicate? + bool isSignExtLoad() const; + /// Is this predicate the predefined zero-extend load predicate? + bool isZeroExtLoad() const; + /// Is this predicate the predefined non-truncating store predicate? + bool isNonTruncStore() const; + /// Is this predicate the predefined truncating store predicate? + bool isTruncStore() const; + + /// If non-null, indicates that this predicate is a predefined memory VT + /// predicate for a load and returns the ValueType record for the memory VT. + Record *getLoadMemoryVT() const; + /// If non-null, indicates that this predicate is a predefined memory VT + /// predicate (checking only the scalar type) for load and returns the + /// ValueType record for the memory VT. + Record *getLoadScalarMemoryVT() const; + /// If non-null, indicates that this predicate is a predefined memory VT + /// predicate for a store and returns the ValueType record for the memory VT. + Record *getStoreMemoryVT() const; + /// If non-null, indicates that this predicate is a predefined memory VT + /// predicate (checking only the scalar type) for a store and returns the + /// ValueType record for the memory VT. 
+ Record *getStoreScalarMemoryVT() const; + private: std::string getPredCode() const; std::string getImmCode() const; @@ -329,6 +361,8 @@ /// Get a string that describes the type returned by getImmType() but is /// usable as part of an identifier. std::string getImmTypeIdentifier() const; + + bool isPredefinedPredicateEnabled(StringRef Field) const; }; Index: utils/TableGen/CodeGenDAGPatterns.cpp =================================================================== --- utils/TableGen/CodeGenDAGPatterns.cpp +++ utils/TableGen/CodeGenDAGPatterns.cpp @@ -763,7 +763,69 @@ } std::string TreePredicateFn::getPredCode() const { - return PatFragRec->getRecord()->getValueAsString("PredicateCode"); + std::string Code = ""; + + if (isUnindexedLoad() && isUnindexedStore()) + PrintFatalError( + getOrigPatFragRecord()->getRecord()->getLoc(), + "IsUnindexedLoad and IsUnindexedStore are mutually exclusive"); + if (isUnindexedLoad()) + Code += "if (cast<LoadSDNode>(N)->getAddressingMode() != ISD::UNINDEXED) return false;\n"; + if (isUnindexedStore()) + Code += "if (cast<StoreSDNode>(N)->getAddressingMode() != ISD::UNINDEXED) return false;\n"; + + if ((isNonExtLoad() + isAnyExtLoad() + isSignExtLoad() + isZeroExtLoad() + + isNonTruncStore() + isTruncStore()) > 1) + PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(), + "IsNonExtLoad, IsAnyExtLoad, IsSignExtLoad, IsZeroExtLoad, " + "IsNonTruncStore, and IsTruncStore are mutually exclusive"); + if (isNonExtLoad()) + Code += "if (cast<LoadSDNode>(N)->getExtensionType() != ISD::NON_EXTLOAD) return false;\n"; + if (isAnyExtLoad()) + Code += "if (cast<LoadSDNode>(N)->getExtensionType() != ISD::EXTLOAD) return false;\n"; + if (isSignExtLoad()) + Code += "if (cast<LoadSDNode>(N)->getExtensionType() != ISD::SEXTLOAD) return false;\n"; + if (isZeroExtLoad()) + Code += "if (cast<LoadSDNode>(N)->getExtensionType() != ISD::ZEXTLOAD) return false;\n"; + if (isNonTruncStore()) + Code += " if (cast<StoreSDNode>(N)->isTruncatingStore()) return false;\n"; + if (isTruncStore()) + Code += " if (!cast<StoreSDNode>(N)->isTruncatingStore()) 
return false;\n"; + + Record *LoadMemoryVT = getLoadMemoryVT(); + Record *LoadScalarMemoryVT = getLoadScalarMemoryVT(); + Record *StoreMemoryVT = getStoreMemoryVT(); + Record *StoreScalarMemoryVT = getStoreScalarMemoryVT(); + + if ((LoadMemoryVT || LoadScalarMemoryVT) && (StoreMemoryVT || StoreScalarMemoryVT)) + PrintFatalError(getOrigPatFragRecord()->getRecord()->getLoc(), + "Root of PatFrag cannot be both a load and a store"); + if (LoadMemoryVT) + Code += ("if (cast<LoadSDNode>(N)->getMemoryVT() != MVT::" + + LoadMemoryVT->getName() + ") return false;\n") + .str(); + if (LoadScalarMemoryVT) + Code += ("if (cast<LoadSDNode>(N)->getMemoryVT().getScalarType() != MVT::" + + LoadScalarMemoryVT->getName() + ") return false;\n") + .str(); + if (StoreMemoryVT) + Code += ("if (cast<StoreSDNode>(N)->getMemoryVT() != MVT::" + + StoreMemoryVT->getName() + ") return false;\n") + .str(); + if (StoreScalarMemoryVT) + Code += + ("if (cast<StoreSDNode>(N)->getMemoryVT().getScalarType() != MVT::" + + StoreScalarMemoryVT->getName() + ") return false;\n") + .str(); + + std::string PredicateCode = PatFragRec->getRecord()->getValueAsString("PredicateCode"); + + Code += PredicateCode; + + if (PredicateCode.empty() && !Code.empty()) + Code += "return true;\n"; + + return Code; } std::string TreePredicateFn::getImmCode() const { @@ -781,6 +843,60 @@ Unset); } +bool TreePredicateFn::isPredefinedPredicateEnabled(StringRef Field) const { + bool Unset; + return getOrigPatFragRecord()->getRecord()->getValueAsBitOrUnset(Field, + Unset); +} +bool TreePredicateFn::isUnindexedLoad() const { + return isPredefinedPredicateEnabled("IsUnindexedLoad"); +} +bool TreePredicateFn::isUnindexedStore() const { + return isPredefinedPredicateEnabled("IsUnindexedStore"); +} +bool TreePredicateFn::isNonExtLoad() const { + return isPredefinedPredicateEnabled("IsNonExtLoad"); +} +bool TreePredicateFn::isAnyExtLoad() const { + return isPredefinedPredicateEnabled("IsAnyExtLoad"); +} +bool TreePredicateFn::isSignExtLoad() const { + return 
isPredefinedPredicateEnabled("IsSignExtLoad"); +} +bool TreePredicateFn::isZeroExtLoad() const { + return isPredefinedPredicateEnabled("IsZeroExtLoad"); +} +bool TreePredicateFn::isNonTruncStore() const { + return isPredefinedPredicateEnabled("IsNonTruncStore"); +} +bool TreePredicateFn::isTruncStore() const { + return isPredefinedPredicateEnabled("IsTruncStore"); +} +Record *TreePredicateFn::getLoadMemoryVT() const { + Record *R = getOrigPatFragRecord()->getRecord(); + if (R->isValueUnset("LoadMemoryVT")) + return nullptr; + return R->getValueAsDef("LoadMemoryVT"); +} +Record *TreePredicateFn::getLoadScalarMemoryVT() const { + Record *R = getOrigPatFragRecord()->getRecord(); + if (R->isValueUnset("LoadScalarMemoryVT")) + return nullptr; + return R->getValueAsDef("LoadScalarMemoryVT"); +} +Record *TreePredicateFn::getStoreMemoryVT() const { + Record *R = getOrigPatFragRecord()->getRecord(); + if (R->isValueUnset("StoreMemoryVT")) + return nullptr; + return R->getValueAsDef("StoreMemoryVT"); +} +Record *TreePredicateFn::getStoreScalarMemoryVT() const { + Record *R = getOrigPatFragRecord()->getRecord(); + if (R->isValueUnset("StoreScalarMemoryVT")) + return nullptr; + return R->getValueAsDef("StoreScalarMemoryVT"); +} + std::string TreePredicateFn::getImmType() const { if (immCodeUsesAPInt()) return "const APInt &"; Index: utils/TableGen/GlobalISelEmitter.cpp =================================================================== --- utils/TableGen/GlobalISelEmitter.cpp +++ utils/TableGen/GlobalISelEmitter.cpp @@ -158,6 +158,34 @@ Explanation += " always-true"; if (P.isImmediatePattern()) Explanation += " immediate"; + + if (P.isUnindexedLoad()) + Explanation += " unindexed-load"; + if (P.isUnindexedStore()) + Explanation += " unindexed-store"; + + if (P.isNonExtLoad()) + Explanation += " non-extload"; + if (P.isAnyExtLoad()) + Explanation += " extload"; + if (P.isSignExtLoad()) + Explanation += " sextload"; + if (P.isZeroExtLoad()) + Explanation += " zextload"; + + if 
(P.isNonTruncStore()) + Explanation += " non-truncstore"; + if (P.isTruncStore()) + Explanation += " truncstore"; + + if (Record *VT = P.getLoadMemoryVT()) + Explanation += (" LoadMemVT=" + VT->getName()).str(); + if (Record *VT = P.getLoadScalarMemoryVT()) + Explanation += (" ScalarVT(LoadMemVT)=" + VT->getName()).str(); + if (Record *VT = P.getStoreMemoryVT()) + Explanation += (" StoreMemVT=" + VT->getName()).str(); + if (Record *VT = P.getStoreScalarMemoryVT()) + Explanation += (" ScalarVT(StoreMemVT)=" + VT->getName()).str(); } return Explanation; } @@ -192,6 +220,9 @@ HasUnsupportedPredicate = true; Explanation = Separator + "Has a predicate (" + explainPredicates(N) + ")"; Separator = ", "; + Explanation += (Separator + "first-failing:" + + Predicate.getOrigPatFragRecord()->getRecord()->getName()) + .str(); break; }