diff --git a/clang/include/clang/AST/ASTContext.h b/clang/include/clang/AST/ASTContext.h --- a/clang/include/clang/AST/ASTContext.h +++ b/clang/include/clang/AST/ASTContext.h @@ -1020,6 +1020,8 @@ #include "clang/Basic/PPCTypes.def" #define RVV_TYPE(Name, Id, SingletonId) \ CanQualType SingletonId; +#define RVV_TUPLE(Name, ElemId, Id, SingletonId, NE, EB, NF, IsSigned, IsFP) \ + QualType SingletonId; #include "clang/Basic/RISCVVTypes.def" // Types for deductions in C++0x [stmt.ranged]'s desugaring. Built on demand. @@ -1353,7 +1355,8 @@ /// element type and scalable number of elements. /// /// \pre \p EltTy must be a built-in type. - QualType getScalableVectorType(QualType EltTy, unsigned NumElts) const; + QualType getScalableVectorType(QualType EltTy, unsigned NumElts, + unsigned Tuple) const; /// Return the unique reference to a vector type of the specified /// element type and size. @@ -2967,6 +2970,8 @@ private: void InitBuiltinType(CanQualType &R, BuiltinType::Kind K); + void InitRVVTupleType(QualType &R, StringRef Name, BuiltinType::Kind K, + unsigned NF); class ObjCEncOptions { unsigned Bits; diff --git a/clang/include/clang/Basic/Builtins.def b/clang/include/clang/Basic/Builtins.def --- a/clang/include/clang/Basic/Builtins.def +++ b/clang/include/clang/Basic/Builtins.def @@ -58,6 +58,8 @@ // S -> signed // U -> unsigned // I -> Required to constant fold to an integer constant expression. +// T -> The number of fields in scalable vector structures. +// (e.g. 
TTTTTq for scalable vector structure with 5 fields) // // Types may be postfixed with the following modifiers: // * -> pointer (optionally followed by an address space number, if no address diff --git a/clang/include/clang/Basic/RISCVVTypes.def b/clang/include/clang/Basic/RISCVVTypes.def --- a/clang/include/clang/Basic/RISCVVTypes.def +++ b/clang/include/clang/Basic/RISCVVTypes.def @@ -60,6 +60,21 @@ RVV_VECTOR_TYPE(Name, Id, SingletonId, NumEls, ElBits, NF, false, true) #endif +#ifndef RVV_TUPLE +#define RVV_TUPLE(Name, ElemId, Id, SingletonId, NumEls, ElBits, NF, IsSigned, IsFP) +#endif + +#ifndef RVV_ITUPLE +#define RVV_ITUPLE(Name, ElemId, Id, SingletonId, NumEls, ElBits, NF, IsSigned) \ + RVV_TUPLE(Name, ElemId, Id, SingletonId, NumEls, ElBits, NF, IsSigned, false) +#endif + +#ifndef RVV_FTUPLE +#define RVV_FTUPLE(Name, ElemId, Id, SingletonId, NumEls, ElBits, NF) \ + RVV_TUPLE(Name, ElemId, Id, SingletonId, NumEls, ElBits, NF, false, true) +#endif + + //===- Vector types -------------------------------------------------------===// RVV_VECTOR_TYPE_INT("__rvv_int8mf8_t", RvvInt8mf8, RvvInt8mf8Ty, 1, 8, 1, true) @@ -140,6 +155,246 @@ RVV_PREDICATE_TYPE("__rvv_bool32_t", RvvBool32, RvvBool32Ty, 2) RVV_PREDICATE_TYPE("__rvv_bool64_t", RvvBool64, RvvBool64Ty, 1) +RVV_ITUPLE("__rvv_int8mf8x2_t", RvvInt8mf8, RvvInt8mf8x2, RvvInt8mf8x2Ty, 1, 8, 2, true) +RVV_ITUPLE("__rvv_int8mf8x3_t", RvvInt8mf8, RvvInt8mf8x3, RvvInt8mf8x3Ty, 1, 8, 3, true) +RVV_ITUPLE("__rvv_int8mf8x4_t", RvvInt8mf8, RvvInt8mf8x4, RvvInt8mf8x4Ty, 1, 8, 4, true) +RVV_ITUPLE("__rvv_int8mf8x5_t", RvvInt8mf8, RvvInt8mf8x5, RvvInt8mf8x5Ty, 1, 8, 5, true) +RVV_ITUPLE("__rvv_int8mf8x6_t", RvvInt8mf8, RvvInt8mf8x6, RvvInt8mf8x6Ty, 1, 8, 6, true) +RVV_ITUPLE("__rvv_int8mf8x7_t", RvvInt8mf8, RvvInt8mf8x7, RvvInt8mf8x7Ty, 1, 8, 7, true) +RVV_ITUPLE("__rvv_int8mf8x8_t", RvvInt8mf8, RvvInt8mf8x8, RvvInt8mf8x8Ty, 1, 8, 8, true) +RVV_ITUPLE("__rvv_int8mf4x2_t", RvvInt8mf4, RvvInt8mf4x2, RvvInt8mf4x2Ty, 2, 8, 2,
true) +RVV_ITUPLE("__rvv_int8mf4x3_t", RvvInt8mf4, RvvInt8mf4x3, RvvInt8mf4x3Ty, 2, 8, 3, true) +RVV_ITUPLE("__rvv_int8mf4x4_t", RvvInt8mf4, RvvInt8mf4x4, RvvInt8mf4x4Ty, 2, 8, 4, true) +RVV_ITUPLE("__rvv_int8mf4x5_t", RvvInt8mf4, RvvInt8mf4x5, RvvInt8mf4x5Ty, 2, 8, 5, true) +RVV_ITUPLE("__rvv_int8mf4x6_t", RvvInt8mf4, RvvInt8mf4x6, RvvInt8mf4x6Ty, 2, 8, 6, true) +RVV_ITUPLE("__rvv_int8mf4x7_t", RvvInt8mf4, RvvInt8mf4x7, RvvInt8mf4x7Ty, 2, 8, 7, true) +RVV_ITUPLE("__rvv_int8mf4x8_t", RvvInt8mf4, RvvInt8mf4x8, RvvInt8mf4x8Ty, 2, 8, 8, true) +RVV_ITUPLE("__rvv_int8mf2x2_t", RvvInt8mf2, RvvInt8mf2x2, RvvInt8mf2x2Ty, 4, 8, 2, true) +RVV_ITUPLE("__rvv_int8mf2x3_t", RvvInt8mf2, RvvInt8mf2x3, RvvInt8mf2x3Ty, 4, 8, 3, true) +RVV_ITUPLE("__rvv_int8mf2x4_t", RvvInt8mf2, RvvInt8mf2x4, RvvInt8mf2x4Ty, 4, 8, 4, true) +RVV_ITUPLE("__rvv_int8mf2x5_t", RvvInt8mf2, RvvInt8mf2x5, RvvInt8mf2x5Ty, 4, 8, 5, true) +RVV_ITUPLE("__rvv_int8mf2x6_t", RvvInt8mf2, RvvInt8mf2x6, RvvInt8mf2x6Ty, 4, 8, 6, true) +RVV_ITUPLE("__rvv_int8mf2x7_t", RvvInt8mf2, RvvInt8mf2x7, RvvInt8mf2x7Ty, 4, 8, 7, true) +RVV_ITUPLE("__rvv_int8mf2x8_t", RvvInt8mf2, RvvInt8mf2x8, RvvInt8mf2x8Ty, 4, 8, 8, true) +RVV_ITUPLE("__rvv_int8m1x2_t", RvvInt8m1, RvvInt8m1x2, RvvInt8m1x2Ty, 8, 8, 2, true) +RVV_ITUPLE("__rvv_int8m1x3_t", RvvInt8m1, RvvInt8m1x3, RvvInt8m1x3Ty, 8, 8, 3, true) +RVV_ITUPLE("__rvv_int8m1x4_t", RvvInt8m1, RvvInt8m1x4, RvvInt8m1x4Ty, 8, 8, 4, true) +RVV_ITUPLE("__rvv_int8m1x5_t", RvvInt8m1, RvvInt8m1x5, RvvInt8m1x5Ty, 8, 8, 5, true) +RVV_ITUPLE("__rvv_int8m1x6_t", RvvInt8m1, RvvInt8m1x6, RvvInt8m1x6Ty, 8, 8, 6, true) +RVV_ITUPLE("__rvv_int8m1x7_t", RvvInt8m1, RvvInt8m1x7, RvvInt8m1x7Ty, 8, 8, 7, true) +RVV_ITUPLE("__rvv_int8m1x8_t", RvvInt8m1, RvvInt8m1x8, RvvInt8m1x8Ty, 8, 8, 8, true) +RVV_ITUPLE("__rvv_int8m2x2_t", RvvInt8m2, RvvInt8m2x2, RvvInt8m2x2Ty, 16, 8, 2, true) +RVV_ITUPLE("__rvv_int8m2x3_t", RvvInt8m2, RvvInt8m2x3, RvvInt8m2x3Ty, 16, 8, 3, true) +RVV_ITUPLE("__rvv_int8m2x4_t", RvvInt8m2, 
RvvInt8m2x4, RvvInt8m2x4Ty, 16, 8, 4, true) +RVV_ITUPLE("__rvv_int8m4x2_t", RvvInt8m4, RvvInt8m4x2, RvvInt8m4x2Ty, 32, 8, 2, true) + +RVV_ITUPLE("__rvv_uint8mf8x2_t", RvvUint8mf8, RvvUint8mf8x2, RvvUint8mf8x2Ty, 1, 8, 2, false) +RVV_ITUPLE("__rvv_uint8mf8x3_t", RvvUint8mf8, RvvUint8mf8x3, RvvUint8mf8x3Ty, 1, 8, 3, false) +RVV_ITUPLE("__rvv_uint8mf8x4_t", RvvUint8mf8, RvvUint8mf8x4, RvvUint8mf8x4Ty, 1, 8, 4, false) +RVV_ITUPLE("__rvv_uint8mf8x5_t", RvvUint8mf8, RvvUint8mf8x5, RvvUint8mf8x5Ty, 1, 8, 5, false) +RVV_ITUPLE("__rvv_uint8mf8x6_t", RvvUint8mf8, RvvUint8mf8x6, RvvUint8mf8x6Ty, 1, 8, 6, false) +RVV_ITUPLE("__rvv_uint8mf8x7_t", RvvUint8mf8, RvvUint8mf8x7, RvvUint8mf8x7Ty, 1, 8, 7, false) +RVV_ITUPLE("__rvv_uint8mf8x8_t", RvvUint8mf8, RvvUint8mf8x8, RvvUint8mf8x8Ty, 1, 8, 8, false) +RVV_ITUPLE("__rvv_uint8mf4x2_t", RvvUint8mf4, RvvUint8mf4x2, RvvUint8mf4x2Ty, 2, 8, 2, false) +RVV_ITUPLE("__rvv_uint8mf4x3_t", RvvUint8mf4, RvvUint8mf4x3, RvvUint8mf4x3Ty, 2, 8, 3, false) +RVV_ITUPLE("__rvv_uint8mf4x4_t", RvvUint8mf4, RvvUint8mf4x4, RvvUint8mf4x4Ty, 2, 8, 4, false) +RVV_ITUPLE("__rvv_uint8mf4x5_t", RvvUint8mf4, RvvUint8mf4x5, RvvUint8mf4x5Ty, 2, 8, 5, false) +RVV_ITUPLE("__rvv_uint8mf4x6_t", RvvUint8mf4, RvvUint8mf4x6, RvvUint8mf4x6Ty, 2, 8, 6, false) +RVV_ITUPLE("__rvv_uint8mf4x7_t", RvvUint8mf4, RvvUint8mf4x7, RvvUint8mf4x7Ty, 2, 8, 7, false) +RVV_ITUPLE("__rvv_uint8mf4x8_t", RvvUint8mf4, RvvUint8mf4x8, RvvUint8mf4x8Ty, 2, 8, 8, false) +RVV_ITUPLE("__rvv_uint8mf2x2_t", RvvUint8mf2, RvvUint8mf2x2, RvvUint8mf2x2Ty, 4, 8, 2, false) +RVV_ITUPLE("__rvv_uint8mf2x3_t", RvvUint8mf2, RvvUint8mf2x3, RvvUint8mf2x3Ty, 4, 8, 3, false) +RVV_ITUPLE("__rvv_uint8mf2x4_t", RvvUint8mf2, RvvUint8mf2x4, RvvUint8mf2x4Ty, 4, 8, 4, false) +RVV_ITUPLE("__rvv_uint8mf2x5_t", RvvUint8mf2, RvvUint8mf2x5, RvvUint8mf2x5Ty, 4, 8, 5, false) +RVV_ITUPLE("__rvv_uint8mf2x6_t", RvvUint8mf2, RvvUint8mf2x6, RvvUint8mf2x6Ty, 4, 8, 6, false) +RVV_ITUPLE("__rvv_uint8mf2x7_t", RvvUint8mf2, RvvUint8mf2x7, 
RvvUint8mf2x7Ty, 4, 8, 7, false) +RVV_ITUPLE("__rvv_uint8mf2x8_t", RvvUint8mf2, RvvUint8mf2x8, RvvUint8mf2x8Ty, 4, 8, 8, false) +RVV_ITUPLE("__rvv_uint8m1x2_t", RvvUint8m1, RvvUint8m1x2, RvvUint8m1x2Ty, 8, 8, 2, false) +RVV_ITUPLE("__rvv_uint8m1x3_t", RvvUint8m1, RvvUint8m1x3, RvvUint8m1x3Ty, 8, 8, 3, false) +RVV_ITUPLE("__rvv_uint8m1x4_t", RvvUint8m1, RvvUint8m1x4, RvvUint8m1x4Ty, 8, 8, 4, false) +RVV_ITUPLE("__rvv_uint8m1x5_t", RvvUint8m1, RvvUint8m1x5, RvvUint8m1x5Ty, 8, 8, 5, false) +RVV_ITUPLE("__rvv_uint8m1x6_t", RvvUint8m1, RvvUint8m1x6, RvvUint8m1x6Ty, 8, 8, 6, false) +RVV_ITUPLE("__rvv_uint8m1x7_t", RvvUint8m1, RvvUint8m1x7, RvvUint8m1x7Ty, 8, 8, 7, false) +RVV_ITUPLE("__rvv_uint8m1x8_t", RvvUint8m1, RvvUint8m1x8, RvvUint8m1x8Ty, 8, 8, 8, false) +RVV_ITUPLE("__rvv_uint8m2x2_t", RvvUint8m2, RvvUint8m2x2, RvvUint8m2x2Ty, 16, 8, 2, false) +RVV_ITUPLE("__rvv_uint8m2x3_t", RvvUint8m2, RvvUint8m2x3, RvvUint8m2x3Ty, 16, 8, 3, false) +RVV_ITUPLE("__rvv_uint8m2x4_t", RvvUint8m2, RvvUint8m2x4, RvvUint8m2x4Ty, 16, 8, 4, false) +RVV_ITUPLE("__rvv_uint8m4x2_t", RvvUint8m4, RvvUint8m4x2, RvvUint8m4x2Ty, 32, 8, 2, false) + +RVV_ITUPLE("__rvv_int16mf4x2_t", RvvInt16mf4, RvvInt16mf4x2, RvvInt16mf4x2Ty, 1, 16, 2, true) +RVV_ITUPLE("__rvv_int16mf4x3_t", RvvInt16mf4, RvvInt16mf4x3, RvvInt16mf4x3Ty, 1, 16, 3, true) +RVV_ITUPLE("__rvv_int16mf4x4_t", RvvInt16mf4, RvvInt16mf4x4, RvvInt16mf4x4Ty, 1, 16, 4, true) +RVV_ITUPLE("__rvv_int16mf4x5_t", RvvInt16mf4, RvvInt16mf4x5, RvvInt16mf4x5Ty, 1, 16, 5, true) +RVV_ITUPLE("__rvv_int16mf4x6_t", RvvInt16mf4, RvvInt16mf4x6, RvvInt16mf4x6Ty, 1, 16, 6, true) +RVV_ITUPLE("__rvv_int16mf4x7_t", RvvInt16mf4, RvvInt16mf4x7, RvvInt16mf4x7Ty, 1, 16, 7, true) +RVV_ITUPLE("__rvv_int16mf4x8_t", RvvInt16mf4, RvvInt16mf4x8, RvvInt16mf4x8Ty, 1, 16, 8, true) +RVV_ITUPLE("__rvv_int16mf2x2_t", RvvInt16mf2, RvvInt16mf2x2, RvvInt16mf2x2Ty, 2, 16, 2, true) +RVV_ITUPLE("__rvv_int16mf2x3_t", RvvInt16mf2, RvvInt16mf2x3, RvvInt16mf2x3Ty, 2, 16, 3, true) 
+RVV_ITUPLE("__rvv_int16mf2x4_t", RvvInt16mf2, RvvInt16mf2x4, RvvInt16mf2x4Ty, 2, 16, 4, true) +RVV_ITUPLE("__rvv_int16mf2x5_t", RvvInt16mf2, RvvInt16mf2x5, RvvInt16mf2x5Ty, 2, 16, 5, true) +RVV_ITUPLE("__rvv_int16mf2x6_t", RvvInt16mf2, RvvInt16mf2x6, RvvInt16mf2x6Ty, 2, 16, 6, true) +RVV_ITUPLE("__rvv_int16mf2x7_t", RvvInt16mf2, RvvInt16mf2x7, RvvInt16mf2x7Ty, 2, 16, 7, true) +RVV_ITUPLE("__rvv_int16mf2x8_t", RvvInt16mf2, RvvInt16mf2x8, RvvInt16mf2x8Ty, 2, 16, 8, true) +RVV_ITUPLE("__rvv_int16m1x2_t", RvvInt16m1, RvvInt16m1x2, RvvInt16m1x2Ty, 4, 16, 2, true) +RVV_ITUPLE("__rvv_int16m1x3_t", RvvInt16m1, RvvInt16m1x3, RvvInt16m1x3Ty, 4, 16, 3, true) +RVV_ITUPLE("__rvv_int16m1x4_t", RvvInt16m1, RvvInt16m1x4, RvvInt16m1x4Ty, 4, 16, 4, true) +RVV_ITUPLE("__rvv_int16m1x5_t", RvvInt16m1, RvvInt16m1x5, RvvInt16m1x5Ty, 4, 16, 5, true) +RVV_ITUPLE("__rvv_int16m1x6_t", RvvInt16m1, RvvInt16m1x6, RvvInt16m1x6Ty, 4, 16, 6, true) +RVV_ITUPLE("__rvv_int16m1x7_t", RvvInt16m1, RvvInt16m1x7, RvvInt16m1x7Ty, 4, 16, 7, true) +RVV_ITUPLE("__rvv_int16m1x8_t", RvvInt16m1, RvvInt16m1x8, RvvInt16m1x8Ty, 4, 16, 8, true) +RVV_ITUPLE("__rvv_int16m2x2_t", RvvInt16m2, RvvInt16m2x2, RvvInt16m2x2Ty, 8, 16, 2, true) +RVV_ITUPLE("__rvv_int16m2x3_t", RvvInt16m2, RvvInt16m2x3, RvvInt16m2x3Ty, 8, 16, 3, true) +RVV_ITUPLE("__rvv_int16m2x4_t", RvvInt16m2, RvvInt16m2x4, RvvInt16m2x4Ty, 8, 16, 4, true) +RVV_ITUPLE("__rvv_int16m4x2_t", RvvInt16m4, RvvInt16m4x2, RvvInt16m4x2Ty, 16, 16, 2, true) + +RVV_ITUPLE("__rvv_uint16mf4x2_t", RvvUint16mf4, RvvUint16mf4x2, RvvUint16mf4x2Ty, 1, 16, 2, false) +RVV_ITUPLE("__rvv_uint16mf4x3_t", RvvUint16mf4, RvvUint16mf4x3, RvvUint16mf4x3Ty, 1, 16, 3, false) +RVV_ITUPLE("__rvv_uint16mf4x4_t", RvvUint16mf4, RvvUint16mf4x4, RvvUint16mf4x4Ty, 1, 16, 4, false) +RVV_ITUPLE("__rvv_uint16mf4x5_t", RvvUint16mf4, RvvUint16mf4x5, RvvUint16mf4x5Ty, 1, 16, 5, false) +RVV_ITUPLE("__rvv_uint16mf4x6_t", RvvUint16mf4, RvvUint16mf4x6, RvvUint16mf4x6Ty, 1, 16, 6, false) 
+RVV_ITUPLE("__rvv_uint16mf4x7_t", RvvUint16mf4, RvvUint16mf4x7, RvvUint16mf4x7Ty, 1, 16, 7, false) +RVV_ITUPLE("__rvv_uint16mf4x8_t", RvvUint16mf4, RvvUint16mf4x8, RvvUint16mf4x8Ty, 1, 16, 8, false) +RVV_ITUPLE("__rvv_uint16mf2x2_t", RvvUint16mf2, RvvUint16mf2x2, RvvUint16mf2x2Ty, 2, 16, 2, false) +RVV_ITUPLE("__rvv_uint16mf2x3_t", RvvUint16mf2, RvvUint16mf2x3, RvvUint16mf2x3Ty, 2, 16, 3, false) +RVV_ITUPLE("__rvv_uint16mf2x4_t", RvvUint16mf2, RvvUint16mf2x4, RvvUint16mf2x4Ty, 2, 16, 4, false) +RVV_ITUPLE("__rvv_uint16mf2x5_t", RvvUint16mf2, RvvUint16mf2x5, RvvUint16mf2x5Ty, 2, 16, 5, false) +RVV_ITUPLE("__rvv_uint16mf2x6_t", RvvUint16mf2, RvvUint16mf2x6, RvvUint16mf2x6Ty, 2, 16, 6, false) +RVV_ITUPLE("__rvv_uint16mf2x7_t", RvvUint16mf2, RvvUint16mf2x7, RvvUint16mf2x7Ty, 2, 16, 7, false) +RVV_ITUPLE("__rvv_uint16mf2x8_t", RvvUint16mf2, RvvUint16mf2x8, RvvUint16mf2x8Ty, 2, 16, 8, false) +RVV_ITUPLE("__rvv_uint16m1x2_t", RvvUint16m1, RvvUint16m1x2, RvvUint16m1x2Ty, 4, 16, 2, false) +RVV_ITUPLE("__rvv_uint16m1x3_t", RvvUint16m1, RvvUint16m1x3, RvvUint16m1x3Ty, 4, 16, 3, false) +RVV_ITUPLE("__rvv_uint16m1x4_t", RvvUint16m1, RvvUint16m1x4, RvvUint16m1x4Ty, 4, 16, 4, false) +RVV_ITUPLE("__rvv_uint16m1x5_t", RvvUint16m1, RvvUint16m1x5, RvvUint16m1x5Ty, 4, 16, 5, false) +RVV_ITUPLE("__rvv_uint16m1x6_t", RvvUint16m1, RvvUint16m1x6, RvvUint16m1x6Ty, 4, 16, 6, false) +RVV_ITUPLE("__rvv_uint16m1x7_t", RvvUint16m1, RvvUint16m1x7, RvvUint16m1x7Ty, 4, 16, 7, false) +RVV_ITUPLE("__rvv_uint16m1x8_t", RvvUint16m1, RvvUint16m1x8, RvvUint16m1x8Ty, 4, 16, 8, false) +RVV_ITUPLE("__rvv_uint16m2x2_t", RvvUint16m2, RvvUint16m2x2, RvvUint16m2x2Ty, 8, 16, 2, false) +RVV_ITUPLE("__rvv_uint16m2x3_t", RvvUint16m2, RvvUint16m2x3, RvvUint16m2x3Ty, 8, 16, 3, false) +RVV_ITUPLE("__rvv_uint16m2x4_t", RvvUint16m2, RvvUint16m2x4, RvvUint16m2x4Ty, 8, 16, 4, false) +RVV_ITUPLE("__rvv_uint16m4x2_t", RvvUint16m4, RvvUint16m4x2, RvvUint16m4x2Ty, 16, 16, 2, false) + +RVV_ITUPLE("__rvv_int32mf2x2_t", 
RvvInt32mf2, RvvInt32mf2x2, RvvInt32mf2x2Ty, 1, 32, 2, true) +RVV_ITUPLE("__rvv_int32mf2x3_t", RvvInt32mf2, RvvInt32mf2x3, RvvInt32mf2x3Ty, 1, 32, 3, true) +RVV_ITUPLE("__rvv_int32mf2x4_t", RvvInt32mf2, RvvInt32mf2x4, RvvInt32mf2x4Ty, 1, 32, 4, true) +RVV_ITUPLE("__rvv_int32mf2x5_t", RvvInt32mf2, RvvInt32mf2x5, RvvInt32mf2x5Ty, 1, 32, 5, true) +RVV_ITUPLE("__rvv_int32mf2x6_t", RvvInt32mf2, RvvInt32mf2x6, RvvInt32mf2x6Ty, 1, 32, 6, true) +RVV_ITUPLE("__rvv_int32mf2x7_t", RvvInt32mf2, RvvInt32mf2x7, RvvInt32mf2x7Ty, 1, 32, 7, true) +RVV_ITUPLE("__rvv_int32mf2x8_t", RvvInt32mf2, RvvInt32mf2x8, RvvInt32mf2x8Ty, 1, 32, 8, true) +RVV_ITUPLE("__rvv_int32m1x2_t", RvvInt32m1, RvvInt32m1x2, RvvInt32m1x2Ty, 2, 32, 2, true) +RVV_ITUPLE("__rvv_int32m1x3_t", RvvInt32m1, RvvInt32m1x3, RvvInt32m1x3Ty, 2, 32, 3, true) +RVV_ITUPLE("__rvv_int32m1x4_t", RvvInt32m1, RvvInt32m1x4, RvvInt32m1x4Ty, 2, 32, 4, true) +RVV_ITUPLE("__rvv_int32m1x5_t", RvvInt32m1, RvvInt32m1x5, RvvInt32m1x5Ty, 2, 32, 5, true) +RVV_ITUPLE("__rvv_int32m1x6_t", RvvInt32m1, RvvInt32m1x6, RvvInt32m1x6Ty, 2, 32, 6, true) +RVV_ITUPLE("__rvv_int32m1x7_t", RvvInt32m1, RvvInt32m1x7, RvvInt32m1x7Ty, 2, 32, 7, true) +RVV_ITUPLE("__rvv_int32m1x8_t", RvvInt32m1, RvvInt32m1x8, RvvInt32m1x8Ty, 2, 32, 8, true) +RVV_ITUPLE("__rvv_int32m2x2_t", RvvInt32m2, RvvInt32m2x2, RvvInt32m2x2Ty, 4, 32, 2, true) +RVV_ITUPLE("__rvv_int32m2x3_t", RvvInt32m2, RvvInt32m2x3, RvvInt32m2x3Ty, 4, 32, 3, true) +RVV_ITUPLE("__rvv_int32m2x4_t", RvvInt32m2, RvvInt32m2x4, RvvInt32m2x4Ty, 4, 32, 4, true) +RVV_ITUPLE("__rvv_int32m4x2_t", RvvInt32m4, RvvInt32m4x2, RvvInt32m4x2Ty, 8, 32, 2, true) + +RVV_ITUPLE("__rvv_uint32mf2x2_t", RvvUint32mf2, RvvUint32mf2x2, RvvUint32mf2x2Ty, 1, 32, 2, false) +RVV_ITUPLE("__rvv_uint32mf2x3_t", RvvUint32mf2, RvvUint32mf2x3, RvvUint32mf2x3Ty, 1, 32, 3, false) +RVV_ITUPLE("__rvv_uint32mf2x4_t", RvvUint32mf2, RvvUint32mf2x4, RvvUint32mf2x4Ty, 1, 32, 4, false) +RVV_ITUPLE("__rvv_uint32mf2x5_t", RvvUint32mf2, RvvUint32mf2x5, 
RvvUint32mf2x5Ty, 1, 32, 5, false) +RVV_ITUPLE("__rvv_uint32mf2x6_t", RvvUint32mf2, RvvUint32mf2x6, RvvUint32mf2x6Ty, 1, 32, 6, false) +RVV_ITUPLE("__rvv_uint32mf2x7_t", RvvUint32mf2, RvvUint32mf2x7, RvvUint32mf2x7Ty, 1, 32, 7, false) +RVV_ITUPLE("__rvv_uint32mf2x8_t", RvvUint32mf2, RvvUint32mf2x8, RvvUint32mf2x8Ty, 1, 32, 8, false) +RVV_ITUPLE("__rvv_uint32m1x2_t", RvvUint32m1, RvvUint32m1x2, RvvUint32m1x2Ty, 2, 32, 2, false) +RVV_ITUPLE("__rvv_uint32m1x3_t", RvvUint32m1, RvvUint32m1x3, RvvUint32m1x3Ty, 2, 32, 3, false) +RVV_ITUPLE("__rvv_uint32m1x4_t", RvvUint32m1, RvvUint32m1x4, RvvUint32m1x4Ty, 2, 32, 4, false) +RVV_ITUPLE("__rvv_uint32m1x5_t", RvvUint32m1, RvvUint32m1x5, RvvUint32m1x5Ty, 2, 32, 5, false) +RVV_ITUPLE("__rvv_uint32m1x6_t", RvvUint32m1, RvvUint32m1x6, RvvUint32m1x6Ty, 2, 32, 6, false) +RVV_ITUPLE("__rvv_uint32m1x7_t", RvvUint32m1, RvvUint32m1x7, RvvUint32m1x7Ty, 2, 32, 7, false) +RVV_ITUPLE("__rvv_uint32m1x8_t", RvvUint32m1, RvvUint32m1x8, RvvUint32m1x8Ty, 2, 32, 8, false) +RVV_ITUPLE("__rvv_uint32m2x2_t", RvvUint32m2, RvvUint32m2x2, RvvUint32m2x2Ty, 4, 32, 2, false) +RVV_ITUPLE("__rvv_uint32m2x3_t", RvvUint32m2, RvvUint32m2x3, RvvUint32m2x3Ty, 4, 32, 3, false) +RVV_ITUPLE("__rvv_uint32m2x4_t", RvvUint32m2, RvvUint32m2x4, RvvUint32m2x4Ty, 4, 32, 4, false) +RVV_ITUPLE("__rvv_uint32m4x2_t", RvvUint32m4, RvvUint32m4x2, RvvUint32m4x2Ty, 8, 32, 2, false) + +RVV_ITUPLE("__rvv_int64m1x2_t", RvvInt64m1, RvvInt64m1x2, RvvInt64m1x2Ty, 1, 64, 2, true) +RVV_ITUPLE("__rvv_int64m1x3_t", RvvInt64m1, RvvInt64m1x3, RvvInt64m1x3Ty, 1, 64, 3, true) +RVV_ITUPLE("__rvv_int64m1x4_t", RvvInt64m1, RvvInt64m1x4, RvvInt64m1x4Ty, 1, 64, 4, true) +RVV_ITUPLE("__rvv_int64m1x5_t", RvvInt64m1, RvvInt64m1x5, RvvInt64m1x5Ty, 1, 64, 5, true) +RVV_ITUPLE("__rvv_int64m1x6_t", RvvInt64m1, RvvInt64m1x6, RvvInt64m1x6Ty, 1, 64, 6, true) +RVV_ITUPLE("__rvv_int64m1x7_t", RvvInt64m1, RvvInt64m1x7, RvvInt64m1x7Ty, 1, 64, 7, true) +RVV_ITUPLE("__rvv_int64m1x8_t", RvvInt64m1, RvvInt64m1x8, 
RvvInt64m1x8Ty, 1, 64, 8, true) +RVV_ITUPLE("__rvv_int64m2x2_t", RvvInt64m2, RvvInt64m2x2, RvvInt64m2x2Ty, 2, 64, 2, true) +RVV_ITUPLE("__rvv_int64m2x3_t", RvvInt64m2, RvvInt64m2x3, RvvInt64m2x3Ty, 2, 64, 3, true) +RVV_ITUPLE("__rvv_int64m2x4_t", RvvInt64m2, RvvInt64m2x4, RvvInt64m2x4Ty, 2, 64, 4, true) +RVV_ITUPLE("__rvv_int64m4x2_t", RvvInt64m4, RvvInt64m4x2, RvvInt64m4x2Ty, 4, 64, 2, true) + +RVV_ITUPLE("__rvv_uint64m1x2_t", RvvUint64m1, RvvUint64m1x2, RvvUint64m1x2Ty, 1, 64, 2, false) +RVV_ITUPLE("__rvv_uint64m1x3_t", RvvUint64m1, RvvUint64m1x3, RvvUint64m1x3Ty, 1, 64, 3, false) +RVV_ITUPLE("__rvv_uint64m1x4_t", RvvUint64m1, RvvUint64m1x4, RvvUint64m1x4Ty, 1, 64, 4, false) +RVV_ITUPLE("__rvv_uint64m1x5_t", RvvUint64m1, RvvUint64m1x5, RvvUint64m1x5Ty, 1, 64, 5, false) +RVV_ITUPLE("__rvv_uint64m1x6_t", RvvUint64m1, RvvUint64m1x6, RvvUint64m1x6Ty, 1, 64, 6, false) +RVV_ITUPLE("__rvv_uint64m1x7_t", RvvUint64m1, RvvUint64m1x7, RvvUint64m1x7Ty, 1, 64, 7, false) +RVV_ITUPLE("__rvv_uint64m1x8_t", RvvUint64m1, RvvUint64m1x8, RvvUint64m1x8Ty, 1, 64, 8, false) +RVV_ITUPLE("__rvv_uint64m2x2_t", RvvUint64m2, RvvUint64m2x2, RvvUint64m2x2Ty, 2, 64, 2, false) +RVV_ITUPLE("__rvv_uint64m2x3_t", RvvUint64m2, RvvUint64m2x3, RvvUint64m2x3Ty, 2, 64, 3, false) +RVV_ITUPLE("__rvv_uint64m2x4_t", RvvUint64m2, RvvUint64m2x4, RvvUint64m2x4Ty, 2, 64, 4, false) +RVV_ITUPLE("__rvv_uint64m4x2_t", RvvUint64m4, RvvUint64m4x2, RvvUint64m4x2Ty, 4, 64, 2, false) + +RVV_FTUPLE("__rvv_float16mf4x2_t", RvvFloat16mf4, RvvFloat16mf4x2, RvvFloat16mf4x2Ty, 1, 16, 2) +RVV_FTUPLE("__rvv_float16mf4x3_t", RvvFloat16mf4, RvvFloat16mf4x3, RvvFloat16mf4x3Ty, 1, 16, 3) +RVV_FTUPLE("__rvv_float16mf4x4_t", RvvFloat16mf4, RvvFloat16mf4x4, RvvFloat16mf4x4Ty, 1, 16, 4) +RVV_FTUPLE("__rvv_float16mf4x5_t", RvvFloat16mf4, RvvFloat16mf4x5, RvvFloat16mf4x5Ty, 1, 16, 5) +RVV_FTUPLE("__rvv_float16mf4x6_t", RvvFloat16mf4, RvvFloat16mf4x6, RvvFloat16mf4x6Ty, 1, 16, 6) +RVV_FTUPLE("__rvv_float16mf4x7_t", RvvFloat16mf4, 
RvvFloat16mf4x7, RvvFloat16mf4x7Ty, 1, 16, 7) +RVV_FTUPLE("__rvv_float16mf4x8_t", RvvFloat16mf4, RvvFloat16mf4x8, RvvFloat16mf4x8Ty, 1, 16, 8) +RVV_FTUPLE("__rvv_float16mf2x2_t", RvvFloat16mf2, RvvFloat16mf2x2, RvvFloat16mf2x2Ty, 2, 16, 2) +RVV_FTUPLE("__rvv_float16mf2x3_t", RvvFloat16mf2, RvvFloat16mf2x3, RvvFloat16mf2x3Ty, 2, 16, 3) +RVV_FTUPLE("__rvv_float16mf2x4_t", RvvFloat16mf2, RvvFloat16mf2x4, RvvFloat16mf2x4Ty, 2, 16, 4) +RVV_FTUPLE("__rvv_float16mf2x5_t", RvvFloat16mf2, RvvFloat16mf2x5, RvvFloat16mf2x5Ty, 2, 16, 5) +RVV_FTUPLE("__rvv_float16mf2x6_t", RvvFloat16mf2, RvvFloat16mf2x6, RvvFloat16mf2x6Ty, 2, 16, 6) +RVV_FTUPLE("__rvv_float16mf2x7_t", RvvFloat16mf2, RvvFloat16mf2x7, RvvFloat16mf2x7Ty, 2, 16, 7) +RVV_FTUPLE("__rvv_float16mf2x8_t", RvvFloat16mf2, RvvFloat16mf2x8, RvvFloat16mf2x8Ty, 2, 16, 8) +RVV_FTUPLE("__rvv_float16m1x2_t", RvvFloat16m1, RvvFloat16m1x2, RvvFloat16m1x2Ty, 4, 16, 2) +RVV_FTUPLE("__rvv_float16m1x3_t", RvvFloat16m1, RvvFloat16m1x3, RvvFloat16m1x3Ty, 4, 16, 3) +RVV_FTUPLE("__rvv_float16m1x4_t", RvvFloat16m1, RvvFloat16m1x4, RvvFloat16m1x4Ty, 4, 16, 4) +RVV_FTUPLE("__rvv_float16m1x5_t", RvvFloat16m1, RvvFloat16m1x5, RvvFloat16m1x5Ty, 4, 16, 5) +RVV_FTUPLE("__rvv_float16m1x6_t", RvvFloat16m1, RvvFloat16m1x6, RvvFloat16m1x6Ty, 4, 16, 6) +RVV_FTUPLE("__rvv_float16m1x7_t", RvvFloat16m1, RvvFloat16m1x7, RvvFloat16m1x7Ty, 4, 16, 7) +RVV_FTUPLE("__rvv_float16m1x8_t", RvvFloat16m1, RvvFloat16m1x8, RvvFloat16m1x8Ty, 4, 16, 8) +RVV_FTUPLE("__rvv_float16m2x2_t", RvvFloat16m2, RvvFloat16m2x2, RvvFloat16m2x2Ty, 8, 16, 2) +RVV_FTUPLE("__rvv_float16m2x3_t", RvvFloat16m2, RvvFloat16m2x3, RvvFloat16m2x3Ty, 8, 16, 3) +RVV_FTUPLE("__rvv_float16m2x4_t", RvvFloat16m2, RvvFloat16m2x4, RvvFloat16m2x4Ty, 8, 16, 4) +RVV_FTUPLE("__rvv_float16m4x2_t", RvvFloat16m4, RvvFloat16m4x2, RvvFloat16m4x2Ty, 16, 16, 2) + +RVV_FTUPLE("__rvv_float32mf2x2_t", RvvFloat32mf2, RvvFloat32mf2x2, RvvFloat32mf2x2Ty, 1, 32, 2) +RVV_FTUPLE("__rvv_float32mf2x3_t", RvvFloat32mf2,
RvvFloat32mf2x3, RvvFloat32mf2x3Ty, 1, 32, 3) +RVV_FTUPLE("__rvv_float32mf2x4_t", RvvFloat32mf2, RvvFloat32mf2x4, RvvFloat32mf2x4Ty, 1, 32, 4) +RVV_FTUPLE("__rvv_float32mf2x5_t", RvvFloat32mf2, RvvFloat32mf2x5, RvvFloat32mf2x5Ty, 1, 32, 5) +RVV_FTUPLE("__rvv_float32mf2x6_t", RvvFloat32mf2, RvvFloat32mf2x6, RvvFloat32mf2x6Ty, 1, 32, 6) +RVV_FTUPLE("__rvv_float32mf2x7_t", RvvFloat32mf2, RvvFloat32mf2x7, RvvFloat32mf2x7Ty, 1, 32, 7) +RVV_FTUPLE("__rvv_float32mf2x8_t", RvvFloat32mf2, RvvFloat32mf2x8, RvvFloat32mf2x8Ty, 1, 32, 8) +RVV_FTUPLE("__rvv_float32m1x2_t", RvvFloat32m1, RvvFloat32m1x2, RvvFloat32m1x2Ty, 2, 32, 2) +RVV_FTUPLE("__rvv_float32m1x3_t", RvvFloat32m1, RvvFloat32m1x3, RvvFloat32m1x3Ty, 2, 32, 3) +RVV_FTUPLE("__rvv_float32m1x4_t", RvvFloat32m1, RvvFloat32m1x4, RvvFloat32m1x4Ty, 2, 32, 4) +RVV_FTUPLE("__rvv_float32m1x5_t", RvvFloat32m1, RvvFloat32m1x5, RvvFloat32m1x5Ty, 2, 32, 5) +RVV_FTUPLE("__rvv_float32m1x6_t", RvvFloat32m1, RvvFloat32m1x6, RvvFloat32m1x6Ty, 2, 32, 6) +RVV_FTUPLE("__rvv_float32m1x7_t", RvvFloat32m1, RvvFloat32m1x7, RvvFloat32m1x7Ty, 2, 32, 7) +RVV_FTUPLE("__rvv_float32m1x8_t", RvvFloat32m1, RvvFloat32m1x8, RvvFloat32m1x8Ty, 2, 32, 8) +RVV_FTUPLE("__rvv_float32m2x2_t", RvvFloat32m2, RvvFloat32m2x2, RvvFloat32m2x2Ty, 4, 32, 2) +RVV_FTUPLE("__rvv_float32m2x3_t", RvvFloat32m2, RvvFloat32m2x3, RvvFloat32m2x3Ty, 4, 32, 3) +RVV_FTUPLE("__rvv_float32m2x4_t", RvvFloat32m2, RvvFloat32m2x4, RvvFloat32m2x4Ty, 4, 32, 4) +RVV_FTUPLE("__rvv_float32m4x2_t", RvvFloat32m4, RvvFloat32m4x2, RvvFloat32m4x2Ty, 8, 32, 2) + +RVV_FTUPLE("__rvv_float64m1x2_t", RvvFloat64m1, RvvFloat64m1x2, RvvFloat64m1x2Ty, 1, 64, 2) +RVV_FTUPLE("__rvv_float64m1x3_t", RvvFloat64m1, RvvFloat64m1x3, RvvFloat64m1x3Ty, 1, 64, 3) +RVV_FTUPLE("__rvv_float64m1x4_t", RvvFloat64m1, RvvFloat64m1x4, RvvFloat64m1x4Ty, 1, 64, 4) +RVV_FTUPLE("__rvv_float64m1x5_t", RvvFloat64m1, RvvFloat64m1x5, RvvFloat64m1x5Ty, 1, 64, 5) +RVV_FTUPLE("__rvv_float64m1x6_t", RvvFloat64m1, RvvFloat64m1x6, 
RvvFloat64m1x6Ty, 1, 64, 6) +RVV_FTUPLE("__rvv_float64m1x7_t", RvvFloat64m1, RvvFloat64m1x7, RvvFloat64m1x7Ty, 1, 64, 7) +RVV_FTUPLE("__rvv_float64m1x8_t", RvvFloat64m1, RvvFloat64m1x8, RvvFloat64m1x8Ty, 1, 64, 8) +RVV_FTUPLE("__rvv_float64m2x2_t", RvvFloat64m2, RvvFloat64m2x2, RvvFloat64m2x2Ty, 2, 64, 2) +RVV_FTUPLE("__rvv_float64m2x3_t", RvvFloat64m2, RvvFloat64m2x3, RvvFloat64m2x3Ty, 2, 64, 3) +RVV_FTUPLE("__rvv_float64m2x4_t", RvvFloat64m2, RvvFloat64m2x4, RvvFloat64m2x4Ty, 2, 64, 4) +RVV_FTUPLE("__rvv_float64m4x2_t", RvvFloat64m4, RvvFloat64m4x2, RvvFloat64m4x2Ty, 4, 64, 2) + +#undef RVV_FTUPLE +#undef RVV_ITUPLE +#undef RVV_TUPLE #undef RVV_VECTOR_TYPE_FLOAT #undef RVV_VECTOR_TYPE_INT #undef RVV_VECTOR_TYPE diff --git a/clang/lib/AST/ASTContext.cpp b/clang/lib/AST/ASTContext.cpp --- a/clang/lib/AST/ASTContext.cpp +++ b/clang/lib/AST/ASTContext.cpp @@ -1439,6 +1439,8 @@ if (Target.hasRISCVVTypes()) { #define RVV_TYPE(Name, Id, SingletonId) \ InitBuiltinType(SingletonId, BuiltinType::Id); +#define RVV_TUPLE(Name, ElemId, Id, SingletonId, NE, EB, NF, IsSigned, IsFP) \ + InitRVVTupleType(SingletonId, Name, BuiltinType::ElemId, NF); #include "clang/Basic/RISCVVTypes.def" } @@ -1478,6 +1480,36 @@ } } +void ASTContext::InitRVVTupleType(QualType &R, StringRef Name, + BuiltinType::Kind K, unsigned NF) { + auto TypeIter = llvm::find_if(Types, [&K](Type *Ty) { + if (Ty->isBuiltinType()) { + return cast<BuiltinType>(Ty)->getKind() == K; + } + return false; + }); + + if (TypeIter != Types.end()) { + RecordDecl *RD = buildImplicitRecord(Name); + RD->startDefinition(); + for (unsigned I = 0; I < NF; ++I) { + SmallVector<char, 8> FieldNameStorage; + FieldDecl *Field = FieldDecl::Create(*this, RD, SourceLocation(), + SourceLocation(), + &Idents.get( + (Twine("v") + Twine(I)).toStringRef(FieldNameStorage)), + QualType(*TypeIter, 0), nullptr, nullptr, false, + ICIS_NoInit); + Field->setAccess(AS_public); + RD->addDecl(Field); + } + RD->completeDefinition(); + + TypedefDecl *TD =
buildImplicitTypedef(getTagDeclType(RD), Name); + R = getTypedefType(TD); + } +} + DiagnosticsEngine &ASTContext::getDiagnostics() const { return SourceMgr.getDiagnostics(); } @@ -3849,7 +3881,8 @@ /// type of the specified element type and size. VectorType must be a built-in /// type. QualType ASTContext::getScalableVectorType(QualType EltTy, - unsigned NumElts) const { + unsigned NumElts, + unsigned Tuple) const { if (Target->hasAArch64SVETypes()) { uint64_t EltTySize = getTypeSize(EltTy); #define SVE_VECTOR_TYPE(Name, MangledName, Id, SingletonId, NumEls, ElBits, \ @@ -3876,11 +3909,18 @@ ((EltTy->hasIntegerRepresentation() && \ EltTy->hasSignedIntegerRepresentation() == IsSigned) || \ (EltTy->hasFloatingRepresentation() && IsFP)) && \ - EltTySize == ElBits && NumElts == NumEls) \ + EltTySize == ElBits && NumElts == NumEls && Tuple == NF) \ return SingletonId; #define RVV_PREDICATE_TYPE(Name, Id, SingletonId, NumEls) \ if (EltTy->isBooleanType() && NumElts == NumEls) \ return SingletonId; +#define RVV_TUPLE(N, EId, Id, SingletonId, NumEls, ElBits, NF, IsSigned, IsFP) \ + if (!EltTy->isBooleanType() && \ + ((EltTy->hasIntegerRepresentation() && \ + EltTy->hasSignedIntegerRepresentation() == IsSigned) || \ + (EltTy->hasFloatingRepresentation() && IsFP)) && \ + EltTySize == ElBits && NumElts == NumEls && Tuple == NF) \ + return SingletonId; #include "clang/Basic/RISCVVTypes.def" } return QualType(); @@ -10167,6 +10207,7 @@ int HowLong = 0; bool Signed = false, Unsigned = false; RequiresICE = false; + int TupleSize = 0; // Read the prefixed modifiers first. bool Done = false; @@ -10254,6 +10295,9 @@ else HowLong = 2; break; + case 'T': + TupleSize++; + break; } } @@ -10372,7 +10416,8 @@ RequiresICE, false); assert(!RequiresICE && "Can't require vector ICE"); - Type = Context.getScalableVectorType(ElementType, NumElements); + Type = Context.getScalableVectorType(ElementType, NumElements, + TupleSize ? 
TupleSize : 1); break; } case 'V': { diff --git a/clang/lib/CodeGen/TargetInfo.cpp b/clang/lib/CodeGen/TargetInfo.cpp --- a/clang/lib/CodeGen/TargetInfo.cpp +++ b/clang/lib/CodeGen/TargetInfo.cpp @@ -10669,6 +10669,10 @@ } } + if (Ty->isRecordType() && !Ty->getAsRecordDecl()->field_empty() && + Ty->getAsRecordDecl()->field_begin()->getType()->isSizelessType()) + return ABIArgInfo::getDirect(CGT.ConvertType(Ty)); + uint64_t NeededAlign = getContext().getTypeAlign(Ty); bool MustUseStack = false; // Determine the number of GPRs needed to pass the current argument diff --git a/clang/lib/Sema/Sema.cpp b/clang/lib/Sema/Sema.cpp --- a/clang/lib/Sema/Sema.cpp +++ b/clang/lib/Sema/Sema.cpp @@ -389,6 +389,8 @@ if (Context.getTargetInfo().hasRISCVVTypes()) { #define RVV_TYPE(Name, Id, SingletonId) \ addImplicitTypedef(Name, Context.SingletonId); +#define RVV_TUPLE(Name, ElemId, Id, SingletonId, NumEls, ElBits, NF, IsSigned, IsFP) \ + addImplicitTypedef(Name, Context.SingletonId); #include "clang/Basic/RISCVVTypes.def" } diff --git a/clang/test/Sema/riscv-types.c b/clang/test/Sema/riscv-types.c --- a/clang/test/Sema/riscv-types.c +++ b/clang/test/Sema/riscv-types.c @@ -133,4 +133,10 @@ // CHECK: __rvv_int8mf2_t x43; __rvv_int8mf2_t x43; + + // CHECK: __rvv_int8mf2x2_t x44; + __rvv_int8mf2x2_t x44; + + // CHECK: __rvv_int8m2x2_t x45; + __rvv_int8m2x2_t x45; }