diff --git a/clang/include/clang/Basic/riscv_vector.td b/clang/include/clang/Basic/riscv_vector.td
--- a/clang/include/clang/Basic/riscv_vector.td
+++ b/clang/include/clang/Basic/riscv_vector.td
@@ -85,6 +85,11 @@
                                list<list<string>> suffixes_prototypes>
     : RVVBuiltinSet<intrinsic_name, type_range, suffixes_prototypes, [-1, 1]>;
 
+// IntrinsicTypes is output, op2 [-1, 2]
+multiclass RVVOutOp2BuiltinSet<string intrinsic_name, string type_range,
+                               list<list<string>> suffixes_prototypes>
+    : RVVBuiltinSet<intrinsic_name, type_range, suffixes_prototypes, [-1, 2]>;
+
 multiclass RVVSignedBinBuiltinSet
     : RVVOutOp1BuiltinSet,
+      RVVOutOp1BuiltinSet;
+
 multiclass RVVSlideOneBuiltinSet
     : RVVOutOp1BuiltinSet;
+}
+
+multiclass RVVOutBuiltinSetZvk<bit HasVV = 1, bit HasVS = 1> {
+  // vaesz only has 'vs' and vgmul only has 'vv' and they do not have ambiguous
+  // prototypes like other zvkned instructions (e.g. vaesdf), so we don't
+  // need to encode the operand mnemonics into its intrinsic function name.
+  if HasVV then {
+    defvar name = NAME # !if(!eq(NAME, "vgmul"), "", "_vv");
+    let OverloadedName = name in
+      defm "" : RVVOutBuiltinSet<NAME # "_vv", "i", [["vv", "Uv", "UvUvUv"]]>;
+  }
+
+  if HasVS then {
+    foreach vs2_lmul = ["(SEFixedLog2LMUL:-1)", "(SEFixedLog2LMUL:0)",
+                        "(SEFixedLog2LMUL:1)", "(SEFixedLog2LMUL:2)",
+                        "(SEFixedLog2LMUL:3)"] in {
+      defvar name = NAME # !if(!eq(NAME, "vaesz"), "", "_vs");
+      let OverloadedName = name, IRName = NAME # "_vs", Name = NAME # "_vs",
+          IntrinsicTypes = [-1, 1] in
+        def NAME # vs2_lmul
+            : RVVBuiltin<vs2_lmul # "UvUv", "UvUv" # vs2_lmul # "Uv", "i">;
+    }
+  }
+}
+
+multiclass RVVOutOp2BuiltinSetVVZvk<string type_range = "i">
+    : RVVOutOp2BuiltinSet<NAME, type_range, [["vv", "Uv", "UvUvUvUv"]]>;
+
+multiclass RVVOutOp2BuiltinSetVIZvk<string type_range = "i">
+    : RVVOutOp2BuiltinSet<NAME, type_range, [["vi", "Uv", "UvUvUvKz"]]>;
+
+multiclass RVVSignedWidenBinBuiltinSetVwsll
+    : RVVWidenBuiltinSet;
+
+let UnMaskedPolicyScheme = HasPassthruOperand in {
+  // zvbb
+  defm vandn : RVVUnsignedBinBuiltinSet;
+  defm vbrev : RVVOutBuiltinSetZvbb;
+  defm vbrev8 : RVVOutBuiltinSetZvbb;
+  defm vrev8 : RVVOutBuiltinSetZvbb;
+  defm vclz : RVVOutBuiltinSetZvbb;
+  defm vctz : RVVOutBuiltinSetZvbb;
+  defm vcpopv : RVVOutBuiltinSetZvbb;
+  defm vrol : RVVUnsignedShiftBuiltinSet;
+  defm vror : RVVUnsignedShiftBuiltinSet;
+  let OverloadedName = "vwsll" in
+    defm vwsll : RVVSignedWidenBinBuiltinSetVwsll;
+
+  // zvbc
+  defm vclmul : RVVInt64BinBuiltinSet;
+  defm vclmulh : RVVInt64BinBuiltinSet;
+}
+
+let UnMaskedPolicyScheme = HasPolicyOperand, HasMasked = false in {
+  // zvkg
+  defm vghsh : RVVOutOp2BuiltinSetVVZvk;
+  defm vgmul : RVVOutBuiltinSetZvk<HasVV=1, HasVS=0>;
+
+  // zvkned
+  defm vaesdf : RVVOutBuiltinSetZvk;
+  defm vaesdm : RVVOutBuiltinSetZvk;
+  defm vaesef : RVVOutBuiltinSetZvk;
+  defm vaesem : RVVOutBuiltinSetZvk;
+  let UnMaskedPolicyScheme = HasPassthruOperand in
+    defm vaeskf1 : RVVOutOp1BuiltinSet<"vaeskf1", "i", [["vi", "Uv", "UvUvKz"]]>;
+  defm vaeskf2 : RVVOutOp2BuiltinSetVIZvk;
+  defm vaesz : RVVOutBuiltinSetZvk<HasVV=0, HasVS=1>;
+
+  // zvknha or zvknhb
+  defm vsha2ch : RVVOutOp2BuiltinSetVVZvk<"il">;
+  defm vsha2cl : RVVOutOp2BuiltinSetVVZvk<"il">;
+  defm vsha2ms : RVVOutOp2BuiltinSetVVZvk<"il">;
+
+  // zvksed
+  let UnMaskedPolicyScheme = HasPassthruOperand in
+    defm vsm4k : RVVOutOp1BuiltinSet<"vsm4k", "i", [["vi", "Uv", "UvUvKz"]]>;
+  defm vsm4r : RVVOutBuiltinSetZvk;
+
+  // zvksh
+  defm vsm3c : RVVOutOp2BuiltinSetVIZvk;
+  let UnMaskedPolicyScheme = HasPassthruOperand in
+    defm vsm3me : RVVOutOp1BuiltinSet<"vsm3me", "i", [["vv", "Uv", "UvUvUv"]]>;
+}
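The overload fan-out that RVVOutBuiltinSetZvk produces is easiest to see at the C level. The sketch below is illustrative commentary, not part of the patch (the wrapper function expand_round is invented here); the intrinsic names themselves are exactly the ones exercised by the autogenerated tests added later in this patch:

#include <riscv_vector.h>

vuint32m1_t expand_round(vuint32m1_t vd, vuint32m1_t rk_m1,
                         vuint32mf2_t rk_mf2, size_t vl) {
  vd = __riscv_vaesdf_vv_u32m1(vd, rk_m1, vl);         // .vv form: per-group round key
  vd = __riscv_vaesdf_vs_u32m1_u32m1(vd, rk_m1, vl);   // .vs form: vs2 LMUL == vd LMUL
  vd = __riscv_vaesdf_vs_u32mf2_u32m1(vd, rk_mf2, vl); // .vs form: vs2 LMUL <  vd LMUL
  return vd;
}

This also shows why the .vs builtins carry two type names in their suffix: the first names the vs2 operand's type (produced by the (SEFixedLog2LMUL:N) transformer), the second the destination's.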
diff --git a/clang/include/clang/Basic/riscv_vector_common.td b/clang/include/clang/Basic/riscv_vector_common.td
--- a/clang/include/clang/Basic/riscv_vector_common.td
+++ b/clang/include/clang/Basic/riscv_vector_common.td
@@ -97,6 +97,11 @@
 //   and LMUL), and computes another vector type which only changed LMUL as
 //   given value. The new LMUL should be smaller than the old one. Ignore to
 //   define a new builtin if its equivalent type has illegal lmul.
+// (SEFixedLog2LMUL:Value): Smaller or Equal Fixed Log2LMUL. Given a vector
+//   type (SEW and LMUL), and computes another vector type which only
+//   changed LMUL as given value. The new LMUL should be smaller than or
+//   equal to the old one. Ignore to define a new builtin if its equivalent
+//   type has illegal lmul.
 // (LFixedLog2LMUL:Value): Larger Fixed Log2LMUL. Given a vector type (SEW
 //   and LMUL), and computes another vector type which only changed LMUL as
 //   given value. The new LMUL should be larger than the old one. Ignore to
diff --git a/clang/include/clang/Support/RISCVVIntrinsicUtils.h b/clang/include/clang/Support/RISCVVIntrinsicUtils.h
--- a/clang/include/clang/Support/RISCVVIntrinsicUtils.h
+++ b/clang/include/clang/Support/RISCVVIntrinsicUtils.h
@@ -58,6 +58,13 @@
   SFixedLog2LMUL1,
   SFixedLog2LMUL2,
   SFixedLog2LMUL3,
+  SEFixedLog2LMULN3,
+  SEFixedLog2LMULN2,
+  SEFixedLog2LMULN1,
+  SEFixedLog2LMUL0,
+  SEFixedLog2LMUL1,
+  SEFixedLog2LMUL2,
+  SEFixedLog2LMUL3,
   Tuple2,
   Tuple3,
   Tuple4,
@@ -259,7 +266,7 @@
   std::string Str;
   std::string ShortStr;
 
-  enum class FixedLMULType { LargerThan, SmallerThan };
+  enum class FixedLMULType { LargerThan, SmallerThan, SmallerOrEqual };
 
   RVVType(BasicType BT, int Log2LMUL, const PrototypeDescriptor &Profile);
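The new SmallerOrEqual kind differs from the pre-existing SmallerThan in exactly one case: the fixed LMUL may equal the type's own LMUL instead of having to sit strictly below it. In intrinsic terms (illustrative only; both names appear in the vaesz tests added by this patch, and key_add is an invented wrapper):

#include <riscv_vector.h>

vuint32m1_t key_add(vuint32m1_t vd, vuint32mf2_t k_mf2, vuint32m1_t k_m1,
                    size_t vl) {
  vd = __riscv_vaesz_vs_u32mf2_u32m1(vd, k_mf2, vl); // log2LMUL(vs2) = -1 <= 0
  vd = __riscv_vaesz_vs_u32m1_u32m1(vd, k_m1, vl);   // log2LMUL(vs2) =  0 <= 0,
                                                     // only legal with SmallerOrEqual
  return vd;
}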
diff --git a/clang/lib/Sema/SemaChecking.cpp b/clang/lib/Sema/SemaChecking.cpp
--- a/clang/lib/Sema/SemaChecking.cpp
+++ b/clang/lib/Sema/SemaChecking.cpp
@@ -86,6 +86,7 @@
 #include "llvm/Support/MathExtras.h"
 #include "llvm/Support/SaveAndRestore.h"
 #include "llvm/Support/raw_ostream.h"
+#include "llvm/TargetParser/RISCVTargetParser.h"
 #include "llvm/TargetParser/Triple.h"
 #include <algorithm>
 #include <bitset>
@@ -4513,6 +4514,27 @@
         << Arg->getSourceRange();
 }
 
+static bool CheckInvalidVLENandLMUL(const TargetInfo &TI, CallExpr *TheCall,
+                                    Sema &S, QualType Type, int EGW) {
+  assert((EGW == 128 || EGW == 256) && "EGW can only be 128 or 256 bits");
+
+  // LMUL * VLEN >= EGW
+  uint64_t ElemSize = Type->isRVVType(32, false) ? 32 : 64;
+  uint64_t ElemCount = Type->isRVVType(1) ? 1 :
+                       Type->isRVVType(2) ? 2 :
+                       Type->isRVVType(4) ? 4 :
+                       Type->isRVVType(8) ? 8 :
+                                            16;
+  float Lmul = (float)(ElemSize * ElemCount) / llvm::RISCV::RVVBitsPerBlock;
+  uint64_t MinRequiredVLEN = std::max(EGW / Lmul, (float)ElemSize);
+  std::string RequiredExt = "zvl" + std::to_string(MinRequiredVLEN) + "b";
+  if (!TI.hasFeature(RequiredExt))
+    return S.Diag(TheCall->getBeginLoc(),
+                  diag::err_riscv_type_requires_extension)
+           << Type << RequiredExt;
+
+  return false;
+}
+
 bool Sema::CheckRISCVBuiltinFunctionCall(const TargetInfo &TI,
                                          unsigned BuiltinID,
                                          CallExpr *TheCall) {
@@ -4671,6 +4693,76 @@
         (VecInfo.EC.getKnownMinValue() * VecInfo.NumVectors);
     return SemaBuiltinConstantArgRange(TheCall, 1, 0, MaxIndex - 1);
   }
+  // Vector Crypto
+  case RISCVVector::BI__builtin_rvv_vaeskf1_vi_tu:
+  case RISCVVector::BI__builtin_rvv_vaeskf2_vi_tu:
+  case RISCVVector::BI__builtin_rvv_vaeskf2_vi:
+  case RISCVVector::BI__builtin_rvv_vsm4k_vi_tu: {
+    QualType Op1Type = TheCall->getArg(0)->getType();
+    QualType Op2Type = TheCall->getArg(1)->getType();
+    return CheckInvalidVLENandLMUL(TI, TheCall, *this, Op1Type, 128) ||
+           CheckInvalidVLENandLMUL(TI, TheCall, *this, Op2Type, 128) ||
+           SemaBuiltinConstantArgRange(TheCall, 2, 0, 31);
+  }
+  case RISCVVector::BI__builtin_rvv_vsm3c_vi_tu:
+  case RISCVVector::BI__builtin_rvv_vsm3c_vi: {
+    QualType Op1Type = TheCall->getArg(0)->getType();
+    return CheckInvalidVLENandLMUL(TI, TheCall, *this, Op1Type, 256) ||
+           SemaBuiltinConstantArgRange(TheCall, 2, 0, 31);
+  }
+  case RISCVVector::BI__builtin_rvv_vaeskf1_vi:
+  case RISCVVector::BI__builtin_rvv_vsm4k_vi: {
+    QualType Op1Type = TheCall->getArg(0)->getType();
+    return CheckInvalidVLENandLMUL(TI, TheCall, *this, Op1Type, 128) ||
+           SemaBuiltinConstantArgRange(TheCall, 1, 0, 31);
+  }
+  case RISCVVector::BI__builtin_rvv_vaesdf_vv:
+  case RISCVVector::BI__builtin_rvv_vaesdf_vs:
+  case RISCVVector::BI__builtin_rvv_vaesdm_vv:
+  case RISCVVector::BI__builtin_rvv_vaesdm_vs:
+  case RISCVVector::BI__builtin_rvv_vaesef_vv:
+  case RISCVVector::BI__builtin_rvv_vaesef_vs:
+  case RISCVVector::BI__builtin_rvv_vaesem_vv:
+  case RISCVVector::BI__builtin_rvv_vaesem_vs:
+  case RISCVVector::BI__builtin_rvv_vaesz_vs:
+  case RISCVVector::BI__builtin_rvv_vsm4r_vv:
+  case RISCVVector::BI__builtin_rvv_vsm4r_vs:
+  case RISCVVector::BI__builtin_rvv_vaesdf_vv_tu:
+  case RISCVVector::BI__builtin_rvv_vaesdf_vs_tu:
+  case RISCVVector::BI__builtin_rvv_vaesdm_vv_tu:
+  case RISCVVector::BI__builtin_rvv_vaesdm_vs_tu:
+  case RISCVVector::BI__builtin_rvv_vaesef_vv_tu:
+  case RISCVVector::BI__builtin_rvv_vaesef_vs_tu:
+  case RISCVVector::BI__builtin_rvv_vaesem_vv_tu:
+  case RISCVVector::BI__builtin_rvv_vaesem_vs_tu:
+  case RISCVVector::BI__builtin_rvv_vaesz_vs_tu:
+  case RISCVVector::BI__builtin_rvv_vsm4r_vv_tu:
+  case RISCVVector::BI__builtin_rvv_vsm4r_vs_tu: {
+    QualType Op1Type = TheCall->getArg(0)->getType();
+    QualType Op2Type = TheCall->getArg(1)->getType();
+    return CheckInvalidVLENandLMUL(TI, TheCall, *this, Op1Type, 128) ||
+           CheckInvalidVLENandLMUL(TI, TheCall, *this, Op2Type, 128);
+  }
+  case RISCVVector::BI__builtin_rvv_vsha2ch_vv:
+  case RISCVVector::BI__builtin_rvv_vsha2cl_vv:
+  case RISCVVector::BI__builtin_rvv_vsha2ms_vv:
+  case RISCVVector::BI__builtin_rvv_vsha2ch_vv_tu:
+  case RISCVVector::BI__builtin_rvv_vsha2cl_vv_tu:
+  case RISCVVector::BI__builtin_rvv_vsha2ms_vv_tu: {
+    QualType Op1Type = TheCall->getArg(0)->getType();
+    QualType Op2Type = TheCall->getArg(1)->getType();
+    QualType Op3Type = TheCall->getArg(2)->getType();
+    uint64_t ElemSize = Op1Type->isRVVType(32, false) ? 32 : 64;
+    if (ElemSize == 64 && !TI.hasFeature("experimental-zvknhb"))
+      return
+          Diag(TheCall->getBeginLoc(), diag::err_riscv_type_requires_extension)
+          << Op1Type << "experimental-zvknhb";
+
+    return CheckInvalidVLENandLMUL(TI, TheCall, *this, Op1Type, ElemSize << 2) ||
+           CheckInvalidVLENandLMUL(TI, TheCall, *this, Op2Type, ElemSize << 2) ||
+           CheckInvalidVLENandLMUL(TI, TheCall, *this, Op3Type, ElemSize << 2);
+  }
+
   case RISCVVector::BI__builtin_rvv_sf_vc_i_se_u8mf8:
   case RISCVVector::BI__builtin_rvv_sf_vc_i_se_u8mf4:
   case RISCVVector::BI__builtin_rvv_sf_vc_i_se_u8mf2:
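A worked instance of the new check (illustrative; the numbers just follow the formula in CheckInvalidVLENandLMUL): for vuint32mf2_t, SEW is 32 and LMUL is 1/2, so an element-group width of 128 requires VLEN >= max(128 / 0.5, 32) = 256, i.e. the zvl256b feature. The RUN lines in the tests below pass +zvl512b, which implies it.

#include <riscv_vector.h>

vuint32mf2_t expand_mf2(vuint32mf2_t vs2, size_t vl) {
  // Accepted only when the target guarantees VLEN >= 256 (zvl256b);
  // otherwise Sema emits err_riscv_type_requires_extension.
  return __riscv_vaeskf1_vi_u32mf2(vs2, 0, vl);
}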
diff --git a/clang/lib/Support/RISCVVIntrinsicUtils.cpp b/clang/lib/Support/RISCVVIntrinsicUtils.cpp
--- a/clang/lib/Support/RISCVVIntrinsicUtils.cpp
+++ b/clang/lib/Support/RISCVVIntrinsicUtils.cpp
@@ -559,6 +559,38 @@
       return std::nullopt;
     }
+  } else if (ComplexTT.first == "SEFixedLog2LMUL") {
+    int32_t Log2LMUL;
+    if (ComplexTT.second.getAsInteger(10, Log2LMUL)) {
+      llvm_unreachable("Invalid SEFixedLog2LMUL value!");
+      return std::nullopt;
+    }
+    switch (Log2LMUL) {
+    case -3:
+      VTM = VectorTypeModifier::SEFixedLog2LMULN3;
+      break;
+    case -2:
+      VTM = VectorTypeModifier::SEFixedLog2LMULN2;
+      break;
+    case -1:
+      VTM = VectorTypeModifier::SEFixedLog2LMULN1;
+      break;
+    case 0:
+      VTM = VectorTypeModifier::SEFixedLog2LMUL0;
+      break;
+    case 1:
+      VTM = VectorTypeModifier::SEFixedLog2LMUL1;
+      break;
+    case 2:
+      VTM = VectorTypeModifier::SEFixedLog2LMUL2;
+      break;
+    case 3:
+      VTM = VectorTypeModifier::SEFixedLog2LMUL3;
+      break;
+    default:
+      llvm_unreachable("Invalid SEFixedLog2LMUL value, should be [-3, 3]");
+      return std::nullopt;
+    }
   } else if (ComplexTT.first == "Tuple") {
     unsigned NF = 0;
     if (ComplexTT.second.getAsInteger(10, NF)) {
@@ -726,6 +758,27 @@
   case VectorTypeModifier::SFixedLog2LMUL3:
     applyFixedLog2LMUL(3, FixedLMULType::SmallerThan);
     break;
+  case VectorTypeModifier::SEFixedLog2LMULN3:
+    applyFixedLog2LMUL(-3, FixedLMULType::SmallerOrEqual);
+    break;
+  case VectorTypeModifier::SEFixedLog2LMULN2:
+    applyFixedLog2LMUL(-2, FixedLMULType::SmallerOrEqual);
+    break;
+  case VectorTypeModifier::SEFixedLog2LMULN1:
+    applyFixedLog2LMUL(-1, FixedLMULType::SmallerOrEqual);
+    break;
+  case VectorTypeModifier::SEFixedLog2LMUL0:
+    applyFixedLog2LMUL(0, FixedLMULType::SmallerOrEqual);
+    break;
+  case VectorTypeModifier::SEFixedLog2LMUL1:
+    applyFixedLog2LMUL(1, FixedLMULType::SmallerOrEqual);
+    break;
+  case VectorTypeModifier::SEFixedLog2LMUL2:
+    applyFixedLog2LMUL(2, FixedLMULType::SmallerOrEqual);
+    break;
+  case VectorTypeModifier::SEFixedLog2LMUL3:
+    applyFixedLog2LMUL(3, FixedLMULType::SmallerOrEqual);
+    break;
   case VectorTypeModifier::Tuple2:
   case VectorTypeModifier::Tuple3:
   case VectorTypeModifier::Tuple4:
@@ -818,6 +871,12 @@
       return;
     }
     break;
+  case FixedLMULType::SmallerOrEqual:
+    if (Log2LMUL > LMUL.Log2LMUL) {
+      ScalarType = ScalarTypeKind::Invalid;
+      return;
+    }
+    break;
   }
 
   // Update new LMUL
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesdf.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesdf.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesdf.c
@@ -0,0 +1,215 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature
+experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdf_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vv_u32mf2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdf_vs_u32mf2_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32mf2_u32mf2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdf_vs_u32mf2_u32m1(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32mf2_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vs_u32mf2_u32m2(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32mf2_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32mf2_u32m4(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32mf2_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32mf2_u32m8(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32mf2_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv2i32.i64( 
[[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdf_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vv_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdf_vs_u32m1_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m1_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vs_u32m1_u32m2(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m1_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32m1_u32m4(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m1_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m1_u32m8(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m1_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vv_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vs_u32m2_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m2_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32m2_u32m4(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m2_u32m4(vd, vs2, vl); +} + 
+// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m2_u32m8(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m2_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdf_vv_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m4_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32m4_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m4_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m4_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m4_u32m8(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m4_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdf_vv_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m8_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m8_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m8_u32m8(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesdm.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesdm.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesdm.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: 
-target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdm_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vv_u32mf2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdm_vs_u32mf2_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32mf2_u32mf2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdm_vs_u32mf2_u32m1(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32mf2_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vs_u32mf2_u32m2(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32mf2_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32mf2_u32m4(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32mf2_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32mf2_u32m8(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32mf2_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// 
+vuint32m1_t test_vaesdm_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vv_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdm_vs_u32m1_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m1_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vs_u32m1_u32m2(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m1_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32m1_u32m4(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m1_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m1_u32m8(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m1_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vv_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vs_u32m2_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m2_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32m2_u32m4(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m2_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m2_u32m8 +// 
CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m2_u32m8(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m2_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdm_vv_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m4_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32m4_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m4_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m4_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m4_u32m8(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m4_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdm_vv_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m8_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m8_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m8_u32m8(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesef.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesef.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesef.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature 
+experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesef_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vv_u32mf2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesef_vs_u32mf2_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32mf2_u32mf2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesef_vs_u32mf2_u32m1(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32mf2_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vs_u32mf2_u32m2(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32mf2_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32mf2_u32m4(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32mf2_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32mf2_u32m8(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32mf2_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesef_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, 
size_t vl) { + return __riscv_vaesef_vv_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesef_vs_u32m1_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m1_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vs_u32m1_u32m2(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m1_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32m1_u32m4(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m1_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m1_u32m8(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m1_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vv_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vs_u32m2_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m2_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32m2_u32m4(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m2_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m2_u32m8(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m2_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesef_vv_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m4_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32m4_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m4_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m4_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m4_u32m8(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m4_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesef_vv_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m8_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m8_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m8_u32m8(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesem.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesem.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesem.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// 
RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesem_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vv_u32mf2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesem_vs_u32mf2_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32mf2_u32mf2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesem_vs_u32mf2_u32m1(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32mf2_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vs_u32mf2_u32m2(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32mf2_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32mf2_u32m4(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32mf2_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32mf2_u32m8(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32mf2_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesem_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vv_u32m1(vd, vs2, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesem_vs_u32m1_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m1_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vs_u32m1_u32m2(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m1_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32m1_u32m4(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m1_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m1_u32m8(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m1_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vv_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vs_u32m2_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m2_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32m2_u32m4(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m2_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m2_u32m8(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m2_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesem_vv_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m4_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32m4_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m4_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m4_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m4_u32m8(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m4_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesem_vv_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m8_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m8_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m8_u32m8(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaeskf1.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaeskf1.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaeskf1.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone 
\
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vaeskf1_vi_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vaeskf1.nxv1i32.i64.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vaeskf1_vi_u32mf2(vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaeskf1_vi_u32mf2(vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vaeskf1_vi_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vaeskf1.nxv2i32.i64.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vaeskf1_vi_u32m1(vuint32m1_t vs2, size_t vl) {
+  return __riscv_vaeskf1_vi_u32m1(vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vaeskf1_vi_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vaeskf1.nxv4i32.i64.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vaeskf1_vi_u32m2(vuint32m2_t vs2, size_t vl) {
+  return __riscv_vaeskf1_vi_u32m2(vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaeskf1_vi_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaeskf1.nxv8i32.i64.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaeskf1_vi_u32m4(vuint32m4_t vs2, size_t vl) {
+  return __riscv_vaeskf1_vi_u32m4(vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaeskf1_vi_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaeskf1.nxv16i32.i64.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaeskf1_vi_u32m8(vuint32m8_t vs2, size_t vl) {
+  return __riscv_vaeskf1_vi_u32m8(vs2, 0, vl);
+}
+
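The second operand of vaeskf1 is a uimm5 round index: the SemaBuiltinConstantArgRange call added above pins it to [0, 31], so it must be an integer constant expression. Illustrative only (first_round_key is an invented wrapper):

#include <riscv_vector.h>

vuint32m1_t first_round_key(vuint32m1_t vs2, size_t vl) {
  return __riscv_vaeskf1_vi_u32m1(vs2, 1, vl); // OK: constant within [0, 31]
  // __riscv_vaeskf1_vi_u32m1(vs2, 32, vl);    // rejected: 32 is out of range
}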
@llvm.riscv.vaeskf2.nxv1i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaeskf2_vi_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaeskf2_vi_u32mf2(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf2_vi_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaeskf2.nxv2i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaeskf2_vi_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaeskf2_vi_u32m1(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf2_vi_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaeskf2.nxv4i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaeskf2_vi_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaeskf2_vi_u32m2(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf2_vi_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaeskf2.nxv8i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaeskf2_vi_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaeskf2_vi_u32m4(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf2_vi_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaeskf2.nxv16i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaeskf2_vi_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaeskf2_vi_u32m8(vd, vs2, 0, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesz.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesz.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaesz.c @@ -0,0 +1,165 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32mf2_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t 
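
Note: vaeskf2 consumes the two most recent round-key groups (vd and vs2) plus a round index, and — as the literal 0 in the tests suggests — the index must be a compile-time constant, so real uses unroll rather than loop over it. A minimal AES-256 key-expansion sketch, assuming w0/w1 are hypothetical vuint32m1_t values holding the two halves of the 256-bit cipher key (the exact per-round operand pairing follows my reading of the vector-crypto spec, not this patch):

  vuint32m1_t w2 = __riscv_vaeskf2_vi_u32m1(w0, w1, 2, vl); // round-key 2
  vuint32m1_t w3 = __riscv_vaeskf2_vi_u32m1(w1, w2, 3, vl); // round-key 3
  // ...continue with constant indices for the remaining rounds.
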
test_vaesz_vs_u32mf2_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32mf2_u32mf2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32mf2_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesz_vs_u32mf2_u32m1(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32mf2_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32mf2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesz_vs_u32mf2_u32m2(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32mf2_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32mf2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesz_vs_u32mf2_u32m4(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32mf2_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32mf2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesz_vs_u32mf2_u32m8(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32mf2_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m1_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesz_vs_u32m1_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m1_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m1_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesz_vs_u32m1_u32m2(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m1_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m1_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesz_vs_u32m1_u32m4(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m1_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vaesz_vs_u32m1_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesz_vs_u32m1_u32m8(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m1_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesz_vs_u32m2_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m2_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesz_vs_u32m2_u32m4(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m2_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesz_vs_u32m2_u32m8(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m2_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m4_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesz_vs_u32m4_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m4_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m4_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesz_vs_u32m4_u32m8(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m4_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m8_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesz_vs_u32m8_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m8_u32m8(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vandn.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vandn.c new file mode 100644 --- /dev/null +++ 
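
Note: vaesz exists only in the .vs form — it performs the round-zero AddRoundKey, XOR-ing the single 128-bit element group in vs2 into every group of vd. That is why the suffixes pair each source LMUL with every destination LMUL of equal or larger size (u32mf2_u32m8, u32m1_u32m4, and so on) instead of enumerating only matching pairs. A usage sketch, where state (vuint32m4_t) and rk (vuint32m1_t) are hypothetical names for the AES state and one round-key group:

  // Round 0: XOR one 128-bit key group into the whole state (sketch):
  vuint32m4_t s0 = __riscv_vaesz_vs_u32m1_u32m4(state, rk, vl);
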
b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vandn.c @@ -0,0 +1,895 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vv_u8mf8(vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i8.i8.i64( poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vx_u8mf8(vuint8mf8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vv_u8mf4(vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i8.i8.i64( poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vx_u8mf4(vuint8mf4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vv_u8mf2(vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i8.i8.i64( poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vx_u8mf2(vuint8mf2_t vs2, uint8_t rs1, size_t vl) { 
+ return __riscv_vandn_vx_u8mf2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vv_u8m1(vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i8.i8.i64( poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vx_u8m1(vuint8m1_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vv_u8m2(vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i8.i8.i64( poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vx_u8m2(vuint8m2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vv_u8m4(vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i8.i8.i64( poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vx_u8m4(vuint8m4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv64i8.nxv64i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vv_u8m8(vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv64i8.i8.i64( 
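
Note: vandn is a bitwise AND with the second source complemented — vd = vs2 & ~vs1 for the vv form and vd = vs2 & ~rs1 for vx — which lets constant-time code clear bit fields in a single instruction. A self-contained sketch assuming Zvbb is enabled (function and parameter names are illustrative):

  #include <riscv_vector.h>
  // Clear the bits selected by `drop` in every element of `x`:
  vuint8m1_t clear_bits(vuint8m1_t x, vuint8m1_t drop, size_t vl) {
    return __riscv_vandn_vv_u8m1(x, drop, vl); // x & ~drop
  }
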
poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vx_u8m8(vuint8m8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vv_u16mf4(vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vandn_vv_u16mf4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i16.i16.i64( poison, [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vx_u16mf4(vuint16mf4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16mf4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vv_u16mf2(vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u16mf2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i16.i16.i64( poison, [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vx_u16mf2(vuint16mf2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16mf2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vv_u16m1(vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i16.i16.i64( poison, [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vx_u16m1(vuint16m1_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vv_u16m2(vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vandn_vx_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i16.i16.i64( poison, [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vx_u16m2(vuint16m2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vv_u16m4(vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i16.i16.i64( poison, [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vx_u16m4(vuint16m4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i16.nxv32i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vv_u16m8(vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i16.i16.i64( poison, [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vx_u16m8(vuint16m8_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vv_u32mf2(vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u32mf2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i32.i32.i64( poison, [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vx_u32mf2(vuint32mf2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32mf2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i32.nxv2i32.i64( poison, [[VS2]], 
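
Note: the vx checks also document the RV64 calling convention for the scalar operand — the signatures above show i8/i16 arguments arriving zeroext while i32 arrives signext. At the C level the usage is identical across element widths; a one-line sketch with an illustrative constant:

  // Scalar operand is broadcast from an x-register (sketch):
  vuint16m1_t r = __riscv_vandn_vx_u16m1(v, (uint16_t)0x00ffu, vl);
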
[[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vv_u32m1(vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i32.i32.i64( poison, [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vx_u32m1(vuint32m1_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vv_u32m2(vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i32.i32.i64( poison, [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vx_u32m2(vuint32m2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vv_u32m4(vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i32.i32.i64( poison, [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vx_u32m4(vuint32m4_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i32.nxv16i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vv_u32m8(vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i32.i32.i64( poison, [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vx_u32m8(vuint32m8_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vandn_vv_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vv_u64m1(vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vx_u64m1(vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vv_u64m2(vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vx_u64m2(vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vv_u64m4(vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vx_u64m4(vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i64.nxv8i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vv_u64m8(vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret 
[[TMP0]] +// +vuint64m8_t test_vandn_vx_u64m8(vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vv_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vx_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vv_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vx_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vv_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vx_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vandn.mask.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vv_u8m1_m(vbool8_t mask, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vx_u8m1_m(vbool8_t mask, vuint8m1_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vv_u8m2_m(vbool4_t mask, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vx_u8m2_m(vbool4_t mask, vuint8m2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vv_u8m4_m(vbool2_t mask, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vx_u8m4_m(vbool2_t mask, vuint8m4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.nxv64i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vv_u8m8_m(vbool1_t mask, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vx_u8m8_m(vbool1_t mask, vuint8m8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vv_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vandn_vv_u16mf4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.i16.i64( poison, [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vx_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16mf4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vv_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u16mf2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.i16.i64( poison, [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vx_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16mf2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vv_u16m1_m(vbool16_t mask, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.i16.i64( poison, [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vx_u16m1_m(vbool16_t 
mask, vuint16m1_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vv_u16m2_m(vbool8_t mask, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.i16.i64( poison, [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vx_u16m2_m(vbool8_t mask, vuint16m2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vv_u16m4_m(vbool4_t mask, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.i16.i64( poison, [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vx_u16m4_m(vbool4_t mask, vuint16m4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.nxv32i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vv_u16m8_m(vbool2_t mask, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.i16.i64( poison, [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vx_u16m8_m(vbool2_t mask, vuint16m8_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
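
Note: the _m variants take the mask as the leading argument, and the generated IR appends a policy operand — i64 3, i.e. tail-agnostic plus mask-agnostic — since these non-policy builtins leave tail and inactive elements unspecified. A sketch, assuming m is a vbool32_t mask and a, b are vuint32m1_t values:

  // Only elements with m[i] set are computed; the rest are agnostic:
  vuint32m1_t r = __riscv_vandn_vv_u32m1_m(m, a, b, vl);
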
@llvm.riscv.vandn.mask.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vv_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u32mf2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.i32.i64( poison, [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vx_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32mf2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vv_u32m1_m(vbool32_t mask, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.i32.i64( poison, [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vx_u32m1_m(vbool32_t mask, vuint32m1_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vv_u32m2_m(vbool16_t mask, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.i32.i64( poison, [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vx_u32m2_m(vbool16_t mask, vuint32m2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vv_u32m4_m(vbool8_t mask, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vandn_vx_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.i32.i64( poison, [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vx_u32m4_m(vbool8_t mask, vuint32m4_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.nxv16i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vv_u32m8_m(vbool4_t mask, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.i32.i64( poison, [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vx_u32m8_m(vbool4_t mask, vuint32m8_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vv_u64m1_m(vbool64_t mask, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vx_u64m1_m(vbool64_t mask, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vv_u64m2_m(vbool32_t mask, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t 
test_vandn_vx_u64m2_m(vbool32_t mask, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vv_u64m4_m(vbool16_t mask, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vx_u64m4_m(vbool16_t mask, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i64.nxv8i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vv_u64m8_m(vbool8_t mask, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vx_u64m8_m(vbool8_t mask, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m8_m(mask, vs2, rs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vbrev.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vbrev.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vbrev.c @@ -0,0 +1,455 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i8.i64( poison, [[VS2]], i64 
[[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev_v_u8mf8(vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv2i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev_v_u8mf4(vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv4i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev_v_u8mf2(vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv8i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev_v_u8m1(vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv16i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev_v_u8m2(vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv32i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev_v_u8m4(vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv64i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev_v_u8m8(vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev_v_u16mf4(vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev_v_u16mf4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv2i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev_v_u16mf2(vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u16mf2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: 
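
Note: vbrev reverses the bit order within each SEW-wide element (by contrast, vrev8 swaps bytes), so for SEW=8 the value 0x80 becomes 0x01. A self-contained sketch (function name is illustrative):

  #include <riscv_vector.h>
  // Reverse the bits of every byte in v (requires Zvbb):
  vuint8m1_t bit_reversed(vuint8m1_t v, size_t vl) {
    return __riscv_vbrev_v_u8m1(v, vl);
  }
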
entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv4i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev_v_u16m1(vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv8i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev_v_u16m2(vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv16i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev_v_u16m4(vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv32i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev_v_u16m8(vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev_v_u32mf2(vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u32mf2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv2i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev_v_u32m1(vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv4i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev_v_u32m2(vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv8i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev_v_u32m4(vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv16i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev_v_u32m8(vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define 
dso_local @test_vbrev_v_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev_v_u64m1(vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv2i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev_v_u64m2(vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv4i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev_v_u64m4(vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv8i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev_v_u64m8(vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev_v_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev_v_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev_v_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev_v_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m2_m +// CHECK-RV64-SAME: 
( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev_v_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv32i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev_v_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv64i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev_v_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev_v_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev_v_u16mf4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev_v_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u16mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev_v_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev_v_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// 
+vuint16m4_t test_vbrev_v_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv32i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev_v_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev_v_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u32mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev_v_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev_v_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev_v_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev_v_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev_v_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vbrev.mask.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vbrev_v_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t vl) {
+  return __riscv_vbrev_v_u64m2_m(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vbrev_v_u64m4_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vbrev.mask.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vbrev_v_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vbrev_v_u64m4_m(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vbrev_v_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vbrev.mask.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vbrev_v_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vbrev_v_u64m8_m(mask, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vbrev8.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vbrev8.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vbrev8.c
@@ -0,0 +1,455 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vbrev8_v_u8mf8
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vbrev8.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vbrev8_v_u8mf8(vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vbrev8_v_u8mf8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vbrev8_v_u8mf4
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vbrev8.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vbrev8_v_u8mf4(vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vbrev8_v_u8mf4(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vbrev8_v_u8mf2
+// CHECK-RV64-SAME: (<vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vbrev8.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vbrev8_v_u8mf2(vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vbrev8_v_u8mf2(vs2,
vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv8i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev8_v_u8m1(vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv16i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev8_v_u8m2(vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv32i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev8_v_u8m4(vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv64i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev8_v_u8m8(vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv1i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev8_v_u16mf4(vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16mf4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv2i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev8_v_u16mf2(vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16mf2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv4i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev8_v_u16m1(vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv8i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev8_v_u16m2(vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv16i16.i64( poison, [[VS2]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev8_v_u16m4(vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv32i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev8_v_u16m8(vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv1i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev8_v_u32mf2(vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32mf2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv2i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev8_v_u32m1(vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv4i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev8_v_u32m2(vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv8i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev8_v_u32m4(vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv16i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev8_v_u32m8(vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv1i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev8_v_u64m1(vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv2i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev8_v_u64m2(vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv4i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev8_v_u64m4(vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv8i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev8_v_u64m8(vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev8_v_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev8_v_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev8_v_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev8_v_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev8_v_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev8_v_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m4_m(mask, vs2, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv64i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev8_v_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev8_v_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16mf4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev8_v_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev8_v_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev8_v_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev8_v_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev8_v_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vbrev8.mask.nxv1i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev8_v_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev8_v_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev8_v_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev8_v_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev8_v_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev8_v_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev8_v_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev8_v_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t vl) { + return 
__riscv_vbrev8_v_u64m4_m(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vbrev8_v_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vbrev8.mask.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vbrev8_v_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vbrev8_v_u64m8_m(mask, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vclmul.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vclmul.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vclmul.c
@@ -0,0 +1,175 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vv_u64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmul_vv_u64m1(vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vclmul_vv_u64m1(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vx_u64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.nxv1i64.i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmul_vx_u64m1(vuint64m1_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul_vx_u64m1(vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vv_u64m2
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmul_vv_u64m2(vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vclmul_vv_u64m2(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vx_u64m2
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.nxv2i64.i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmul_vx_u64m2(vuint64m2_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul_vx_u64m2(vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vv_u64m4
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64
noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vv_u64m4(vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vx_u64m4(vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv8i64.nxv8i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vv_u64m8(vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv8i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vx_u64m8(vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vv_u64m1_m(vbool64_t mask, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vx_u64m1_m(vbool64_t mask, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vv_u64m2_m(vbool32_t mask, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// 
CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.mask.nxv2i64.i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmul_vx_u64m2_m(vbool32_t mask, vuint64m2_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul_vx_u64m2_m(mask, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vv_u64m4_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclmul_vv_u64m4_m(vbool16_t mask, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vclmul_vv_u64m4_m(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vx_u64m4_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.mask.nxv4i64.i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclmul_vx_u64m4_m(vbool16_t mask, vuint64m4_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul_vx_u64m4_m(mask, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vv_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmul_vv_u64m8_m(vbool8_t mask, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vclmul_vv_u64m8_m(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vx_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.mask.nxv8i64.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmul_vx_u64m8_m(vbool8_t mask, vuint64m8_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul_vx_u64m8_m(mask, vs2, rs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vclmulh.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vclmulh.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vclmulh.c
@@ -0,0 +1,175 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL:
define dso_local @test_vclmulh_vv_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vv_u64m1(vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vx_u64m1(vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vv_u64m2(vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv2i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vx_u64m2(vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vv_u64m4(vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vx_u64m4(vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv8i64.nxv8i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vv_u64m8(vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vclmulh.nxv8i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vx_u64m8(vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vv_u64m1_m(vbool64_t mask, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vx_u64m1_m(vbool64_t mask, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vv_u64m2_m(vbool32_t mask, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vx_u64m2_m(vbool32_t mask, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vv_u64m4_m(vbool16_t mask, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vx_u64m4_m(vbool16_t mask, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m8_m +// 
CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmulh.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmulh_vv_u64m8_m(vbool8_t mask, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vclmulh_vv_u64m8_m(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmulh_vx_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmulh.mask.nxv8i64.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmulh_vx_u64m8_m(vbool8_t mask, vuint64m8_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh_vx_u64m8_m(mask, vs2, rs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vclz.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vclz.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vclz.c
@@ -0,0 +1,455 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vclz_v_u8mf8
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vclz.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vclz_v_u8mf8(vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vclz_v_u8mf8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vclz_v_u8mf4
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vclz.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vclz_v_u8mf4(vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vclz_v_u8mf4(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vclz_v_u8mf2
+// CHECK-RV64-SAME: (<vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vclz.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vclz_v_u8mf2(vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vclz_v_u8mf2(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vclz_v_u8m1
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vclz.nxv8i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t
test_vclz_v_u8m1(vuint8m1_t vs2, size_t vl) { + return __riscv_vclz_v_u8m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv16i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vclz_v_u8m2(vuint8m2_t vs2, size_t vl) { + return __riscv_vclz_v_u8m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv32i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vclz_v_u8m4(vuint8m4_t vs2, size_t vl) { + return __riscv_vclz_v_u8m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv64i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vclz_v_u8m8(vuint8m8_t vs2, size_t vl) { + return __riscv_vclz_v_u8m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv1i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vclz_v_u16mf4(vuint16mf4_t vs2, size_t vl) { + return __riscv_vclz_v_u16mf4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv2i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vclz_v_u16mf2(vuint16mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u16mf2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv4i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vclz_v_u16m1(vuint16m1_t vs2, size_t vl) { + return __riscv_vclz_v_u16m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv8i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vclz_v_u16m2(vuint16m2_t vs2, size_t vl) { + return __riscv_vclz_v_u16m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv16i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vclz_v_u16m4(vuint16m4_t vs2, size_t vl) { + return __riscv_vclz_v_u16m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv32i16.i64( poison, 
[[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vclz_v_u16m8(vuint16m8_t vs2, size_t vl) { + return __riscv_vclz_v_u16m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv1i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vclz_v_u32mf2(vuint32mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u32mf2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv2i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vclz_v_u32m1(vuint32m1_t vs2, size_t vl) { + return __riscv_vclz_v_u32m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv4i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vclz_v_u32m2(vuint32m2_t vs2, size_t vl) { + return __riscv_vclz_v_u32m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv8i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vclz_v_u32m4(vuint32m4_t vs2, size_t vl) { + return __riscv_vclz_v_u32m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv16i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vclz_v_u32m8(vuint32m8_t vs2, size_t vl) { + return __riscv_vclz_v_u32m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv1i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclz_v_u64m1(vuint64m1_t vs2, size_t vl) { + return __riscv_vclz_v_u64m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv2i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclz_v_u64m2(vuint64m2_t vs2, size_t vl) { + return __riscv_vclz_v_u64m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv4i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclz_v_u64m4(vuint64m4_t vs2, size_t vl) { + return __riscv_vclz_v_u64m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: 
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv8i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclz_v_u64m8(vuint64m8_t vs2, size_t vl) { + return __riscv_vclz_v_u64m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vclz_v_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t vl) { + return __riscv_vclz_v_u8mf8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vclz_v_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t vl) { + return __riscv_vclz_v_u8mf4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vclz_v_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u8mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vclz_v_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t vl) { + return __riscv_vclz_v_u8m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vclz_v_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t vl) { + return __riscv_vclz_v_u8m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv32i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vclz_v_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t vl) { + return __riscv_vclz_v_u8m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv64i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vclz_v_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t vl) { + return __riscv_vclz_v_u8m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u16mf4_m +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vclz_v_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t vl) { + return __riscv_vclz_v_u16mf4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vclz_v_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u16mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vclz_v_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t vl) { + return __riscv_vclz_v_u16m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vclz_v_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t vl) { + return __riscv_vclz_v_u16m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vclz_v_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t vl) { + return __riscv_vclz_v_u16m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv32i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vclz_v_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t vl) { + return __riscv_vclz_v_u16m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vclz_v_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u32mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// 
+vuint32m1_t test_vclz_v_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t vl) { + return __riscv_vclz_v_u32m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vclz_v_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t vl) { + return __riscv_vclz_v_u32m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vclz_v_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t vl) { + return __riscv_vclz_v_u32m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vclz_v_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t vl) { + return __riscv_vclz_v_u32m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclz_v_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t vl) { + return __riscv_vclz_v_u64m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclz_v_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t vl) { + return __riscv_vclz_v_u64m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclz_v_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t vl) { + return __riscv_vclz_v_u64m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclz_v_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclz_v_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t vl) { + return __riscv_vclz_v_u64m8_m(mask, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vcpopv.c 
b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vcpopv.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vcpopv.c @@ -0,0 +1,402 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +experimental-zvbb -disable-O0-optnone -emit-llvm %s -o - | opt -S -passes=mem2reg | FileCheck %s + +#include <riscv_vector.h> + +// CHECK-LABEL: @test_vcpopv_v_u8mf8( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv1i8.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vcpopv_v_u8mf8(vuint8mf8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf8(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf4( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv2i8.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vcpopv_v_u8mf4(vuint8mf4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf4(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf2( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv4i8.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vcpopv_v_u8mf2(vuint8mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf2(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m1( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv8i8.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vcpopv_v_u8m1(vuint8m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m1(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m2( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv16i8.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vcpopv_v_u8m2(vuint8m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m2(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m4( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv32i8.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vcpopv_v_u8m4(vuint8m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m4(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m8( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv64i8.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vcpopv_v_u8m8(vuint8m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m8(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf4( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv1i16.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vcpopv_v_u16mf4(vuint16mf4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16mf4(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf2( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv2i16.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vcpopv_v_u16mf2(vuint16mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16mf2(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m1( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv4i16.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vcpopv_v_u16m1(vuint16m1_t vs2, size_t vl) { + return
__riscv_vcpopv_v_u16m1(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m2( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv8i16.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vcpopv_v_u16m2(vuint16m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m2(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m4( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv16i16.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vcpopv_v_u16m4(vuint16m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m4(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m8( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv32i16.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vcpopv_v_u16m8(vuint16m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m8(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32mf2( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv1i32.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vcpopv_v_u32mf2(vuint32mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32mf2(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m1( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv2i32.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vcpopv_v_u32m1(vuint32m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m1(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m2( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv4i32.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vcpopv_v_u32m2(vuint32m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m2(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m4( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv8i32.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vcpopv_v_u32m4(vuint32m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m4(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m8( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv16i32.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vcpopv_v_u32m8(vuint32m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m8(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m1( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv1i64.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vcpopv_v_u64m1(vuint64m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m1(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m2( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv2i64.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vcpopv_v_u64m2(vuint64m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m2(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m4( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv4i64.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vcpopv_v_u64m4(vuint64m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m4(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m8( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vcpopv.nxv8i64.i64( poison, [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vcpopv_v_u64m8(vuint64m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m8(vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf8_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i8.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vcpopv_v_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf8_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf4_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i8.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vcpopv_v_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf4_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf2_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i8.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vcpopv_v_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf2_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m1_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i8.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vcpopv_v_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m1_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m2_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i8.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vcpopv_v_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m2_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m4_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i8.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vcpopv_v_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m4_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m8_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv64i8.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vcpopv_v_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m8_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf4_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i16.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vcpopv_v_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16mf4_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf2_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i16.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vcpopv_v_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16mf2_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m1_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i16.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vcpopv_v_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m1_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m2_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i16.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vcpopv_v_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m2_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m4_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i16.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vcpopv_v_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m4_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m8_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i16.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vcpopv_v_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m8_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32mf2_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i32.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vcpopv_v_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32mf2_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m1_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i32.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vcpopv_v_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m1_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m2_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i32.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vcpopv_v_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m2_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m4_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i32.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vcpopv_v_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m4_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m8_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i32.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vcpopv_v_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m8_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m1_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i64.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vcpopv_v_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m1_m(mask, vs2, 
vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m2_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i64.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vcpopv_v_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m2_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m4_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i64.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vcpopv_v_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m4_m(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m8_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i64.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vcpopv_v_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m8_m(mask, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vctz.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vctz.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vctz.c @@ -0,0 +1,455 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include <riscv_vector.h> + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8mf8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv1i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vctz_v_u8mf8(vuint8mf8_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vctz_v_u8mf4(vuint8mf4_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv4i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vctz_v_u8mf2(vuint8mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call
@llvm.riscv.vctz.nxv8i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vctz_v_u8m1(vuint8m1_t vs2, size_t vl) { + return __riscv_vctz_v_u8m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv16i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vctz_v_u8m2(vuint8m2_t vs2, size_t vl) { + return __riscv_vctz_v_u8m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv32i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vctz_v_u8m4(vuint8m4_t vs2, size_t vl) { + return __riscv_vctz_v_u8m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv64i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vctz_v_u8m8(vuint8m8_t vs2, size_t vl) { + return __riscv_vctz_v_u8m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv1i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vctz_v_u16mf4(vuint16mf4_t vs2, size_t vl) { + return __riscv_vctz_v_u16mf4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vctz_v_u16mf2(vuint16mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u16mf2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv4i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vctz_v_u16m1(vuint16m1_t vs2, size_t vl) { + return __riscv_vctz_v_u16m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv8i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vctz_v_u16m2(vuint16m2_t vs2, size_t vl) { + return __riscv_vctz_v_u16m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv16i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vctz_v_u16m4(vuint16m4_t vs2, size_t vl) { + return __riscv_vctz_v_u16m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv32i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vctz_v_u16m8(vuint16m8_t vs2, size_t vl) { + return __riscv_vctz_v_u16m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv1i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vctz_v_u32mf2(vuint32mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u32mf2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vctz_v_u32m1(vuint32m1_t vs2, size_t vl) { + return __riscv_vctz_v_u32m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv4i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vctz_v_u32m2(vuint32m2_t vs2, size_t vl) { + return __riscv_vctz_v_u32m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv8i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vctz_v_u32m4(vuint32m4_t vs2, size_t vl) { + return __riscv_vctz_v_u32m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv16i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vctz_v_u32m8(vuint32m8_t vs2, size_t vl) { + return __riscv_vctz_v_u32m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv1i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vctz_v_u64m1(vuint64m1_t vs2, size_t vl) { + return __riscv_vctz_v_u64m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vctz_v_u64m2(vuint64m2_t vs2, size_t vl) { + return __riscv_vctz_v_u64m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv4i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vctz_v_u64m4(vuint64m4_t vs2, size_t vl) { + return __riscv_vctz_v_u64m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vctz_v_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv8i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vctz_v_u64m8(vuint64m8_t vs2, size_t vl) { + return __riscv_vctz_v_u64m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vctz_v_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vctz_v_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vctz_v_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vctz_v_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t vl) { + return __riscv_vctz_v_u8m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vctz_v_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t vl) { + return __riscv_vctz_v_u8m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv32i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vctz_v_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t vl) { + return __riscv_vctz_v_u8m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv64i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vctz_v_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t vl) { + 
return __riscv_vctz_v_u8m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vctz_v_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t vl) { + return __riscv_vctz_v_u16mf4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vctz_v_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u16mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vctz_v_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t vl) { + return __riscv_vctz_v_u16m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vctz_v_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t vl) { + return __riscv_vctz_v_u16m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vctz_v_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t vl) { + return __riscv_vctz_v_u16m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv32i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vctz_v_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t vl) { + return __riscv_vctz_v_u16m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vctz_v_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u32mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vctz.mask.nxv2i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vctz_v_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t vl) { + return __riscv_vctz_v_u32m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vctz_v_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t vl) { + return __riscv_vctz_v_u32m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vctz_v_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t vl) { + return __riscv_vctz_v_u32m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vctz_v_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t vl) { + return __riscv_vctz_v_u32m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vctz_v_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t vl) { + return __riscv_vctz_v_u64m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vctz_v_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t vl) { + return __riscv_vctz_v_u64m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vctz_v_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t vl) { + return __riscv_vctz_v_u64m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vctz_v_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t vl) { + return __riscv_vctz_v_u64m8_m(mask, vs2, vl); +} + diff --git 
a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vghsh.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vghsh.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vghsh.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include <riscv_vector.h> + +// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vghsh_vv_u32mf2 +// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vghsh.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]] +// +vuint32mf2_t test_vghsh_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vghsh_vv_u32mf2(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vghsh_vv_u32m1 +// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vghsh.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]] +// +vuint32m1_t test_vghsh_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vghsh_vv_u32m1(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vghsh_vv_u32m2 +// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vghsh.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]] +// +vuint32m2_t test_vghsh_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vghsh_vv_u32m2(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vghsh_vv_u32m4 +// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vghsh.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]] +// +vuint32m4_t test_vghsh_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vghsh_vv_u32m4(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vghsh_vv_u32m8 +// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vghsh.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]] +// +vuint32m8_t test_vghsh_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vghsh_vv_u32m8(vd, vs2, vs1, vl); +} + diff --git
a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vgmul.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vgmul.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vgmul.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include <riscv_vector.h> + +// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vgmul_vv_u32mf2 +// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vgmul.vv.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]] +// +vuint32mf2_t test_vgmul_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vgmul_vv_u32mf2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vgmul_vv_u32m1 +// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vgmul.vv.nxv2i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]] +// +vuint32m1_t test_vgmul_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vgmul_vv_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vgmul_vv_u32m2 +// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vgmul.vv.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]] +// +vuint32m2_t test_vgmul_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vgmul_vv_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vgmul_vv_u32m4 +// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vgmul.vv.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]] +// +vuint32m4_t test_vgmul_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vgmul_vv_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vgmul_vv_u32m8 +// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vgmul.vv.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]] +// +vuint32m8_t test_vgmul_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vgmul_vv_u32m8(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vrev8.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vrev8.c new file mode 100644 --- /dev/null +++ 
b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vrev8.c @@ -0,0 +1,455 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include <riscv_vector.h> + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv1i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrev8_v_u8mf8(vuint8mf8_t vs2, size_t vl) { + return __riscv_vrev8_v_u8mf8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv2i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrev8_v_u8mf4(vuint8mf4_t vs2, size_t vl) { + return __riscv_vrev8_v_u8mf4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv4i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrev8_v_u8mf2(vuint8mf2_t vs2, size_t vl) { + return __riscv_vrev8_v_u8mf2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv8i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrev8_v_u8m1(vuint8m1_t vs2, size_t vl) { + return __riscv_vrev8_v_u8m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv16i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrev8_v_u8m2(vuint8m2_t vs2, size_t vl) { + return __riscv_vrev8_v_u8m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv32i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrev8_v_u8m4(vuint8m4_t vs2, size_t vl) { + return __riscv_vrev8_v_u8m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv64i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t
test_vrev8_v_u8m8(vuint8m8_t vs2, size_t vl) { + return __riscv_vrev8_v_u8m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv1i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrev8_v_u16mf4(vuint16mf4_t vs2, size_t vl) { + return __riscv_vrev8_v_u16mf4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv2i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrev8_v_u16mf2(vuint16mf2_t vs2, size_t vl) { + return __riscv_vrev8_v_u16mf2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv4i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrev8_v_u16m1(vuint16m1_t vs2, size_t vl) { + return __riscv_vrev8_v_u16m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv8i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrev8_v_u16m2(vuint16m2_t vs2, size_t vl) { + return __riscv_vrev8_v_u16m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv16i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrev8_v_u16m4(vuint16m4_t vs2, size_t vl) { + return __riscv_vrev8_v_u16m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv32i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrev8_v_u16m8(vuint16m8_t vs2, size_t vl) { + return __riscv_vrev8_v_u16m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv1i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrev8_v_u32mf2(vuint32mf2_t vs2, size_t vl) { + return __riscv_vrev8_v_u32mf2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv2i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrev8_v_u32m1(vuint32m1_t vs2, size_t vl) { + return __riscv_vrev8_v_u32m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv4i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrev8_v_u32m2(vuint32m2_t vs2, size_t vl) { + return __riscv_vrev8_v_u32m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv8i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrev8_v_u32m4(vuint32m4_t vs2, size_t vl) { + return __riscv_vrev8_v_u32m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv16i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrev8_v_u32m8(vuint32m8_t vs2, size_t vl) { + return __riscv_vrev8_v_u32m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv1i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrev8_v_u64m1(vuint64m1_t vs2, size_t vl) { + return __riscv_vrev8_v_u64m1(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv2i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrev8_v_u64m2(vuint64m2_t vs2, size_t vl) { + return __riscv_vrev8_v_u64m2(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv4i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrev8_v_u64m4(vuint64m4_t vs2, size_t vl) { + return __riscv_vrev8_v_u64m4(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv8i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrev8_v_u64m8(vuint64m8_t vs2, size_t vl) { + return __riscv_vrev8_v_u64m8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrev8_v_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t vl) { + return __riscv_vrev8_v_u8mf8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrev8_v_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t vl) { 
+ return __riscv_vrev8_v_u8mf4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrev8_v_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t vl) { + return __riscv_vrev8_v_u8mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrev8_v_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t vl) { + return __riscv_vrev8_v_u8m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrev8_v_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t vl) { + return __riscv_vrev8_v_u8m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv32i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrev8_v_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t vl) { + return __riscv_vrev8_v_u8m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv64i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrev8_v_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t vl) { + return __riscv_vrev8_v_u8m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrev8_v_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t vl) { + return __riscv_vrev8_v_u16mf4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrev8_v_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t vl) { + return __riscv_vrev8_v_u16mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vrev8.mask.nxv4i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrev8_v_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t vl) { + return __riscv_vrev8_v_u16m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrev8_v_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t vl) { + return __riscv_vrev8_v_u16m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrev8_v_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t vl) { + return __riscv_vrev8_v_u16m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv32i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrev8_v_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t vl) { + return __riscv_vrev8_v_u16m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrev8_v_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t vl) { + return __riscv_vrev8_v_u32mf2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrev8_v_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t vl) { + return __riscv_vrev8_v_u32m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrev8_v_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t vl) { + return __riscv_vrev8_v_u32m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrev8_v_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t vl) { + return __riscv_vrev8_v_u32m4_m(mask, vs2, vl); +} + 
+// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrev8_v_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t vl) { + return __riscv_vrev8_v_u32m8_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrev8_v_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t vl) { + return __riscv_vrev8_v_u64m1_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrev8_v_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t vl) { + return __riscv_vrev8_v_u64m2_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrev8_v_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t vl) { + return __riscv_vrev8_v_u64m4_m(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrev8_v_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t vl) { + return __riscv_vrev8_v_u64m8_m(mask, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vrol.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vrol.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vrol.c @@ -0,0 +1,895 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vv_u8mf8(vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vx_u8mf8(vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vv_u8mf4(vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vx_u8mf4(vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vv_u8mf2(vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vx_u8mf2(vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vv_u8m1(vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vx_u8m1(vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vrol_vv_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vv_u8m2(vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vx_u8m2(vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vv_u8m4(vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vx_u8m4(vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv64i8.nxv64i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vv_u8m8(vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv64i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vx_u8m8(vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vv_u16mf4(vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vrol_vv_u16mf4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vx_u16mf4(vuint16mf4_t vs2, 
size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16mf4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vv_u16mf2(vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u16mf2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vx_u16mf2(vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16mf2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vv_u16m1(vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vx_u16m1(vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vv_u16m2(vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vx_u16m2(vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vv_u16m4(vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vrol.nxv16i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vx_u16m4(vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i16.nxv32i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vv_u16m8(vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vx_u16m8(vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vv_u32mf2(vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u32mf2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vx_u32mf2(vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32mf2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vv_u32m1(vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vx_u32m1(vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vv_u32m2(vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m2 +// 
CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vx_u32m2(vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vv_u32m4(vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vx_u32m4(vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i32.nxv16i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vv_u32m8(vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vx_u32m8(vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vv_u64m1(vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vx_u64m1(vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vv_u64m2(vuint64m2_t vs2, 
vuint64m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vx_u64m2(vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vv_u64m4(vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vx_u64m4(vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i64.nxv8i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vv_u64m8(vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vx_u64m8(vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vv_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vx_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], 
[[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vv_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vx_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vv_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vx_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vv_u8m1_m(vbool8_t mask, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vx_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vv_u8m2_m(vbool4_t mask, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m2_m(mask, vs2, vs1, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vx_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vv_u8m4_m(vbool2_t mask, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vx_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.nxv64i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vv_u8m8_m(vbool1_t mask, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vx_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vv_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vrol_vv_u16mf4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vx_u16mf4_m(vbool64_t mask, 
vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16mf4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vv_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u16mf2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vx_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16mf2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vv_u16m1_m(vbool16_t mask, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vx_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vv_u16m2_m(vbool8_t mask, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vx_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.nxv16i16.i64( poison, 
[[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vv_u16m4_m(vbool4_t mask, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vx_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.nxv32i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vv_u16m8_m(vbool2_t mask, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vx_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vv_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u32mf2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vx_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32mf2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vv_u32m1_m(vbool32_t mask, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vx_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vv_u32m2_m(vbool16_t mask, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vx_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vv_u32m4_m(vbool8_t mask, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vx_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.nxv16i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vv_u32m8_m(vbool4_t mask, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vx_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: 
define dso_local @test_vrol_vv_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vv_u64m1_m(vbool64_t mask, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vx_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vv_u64m2_m(vbool32_t mask, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vx_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vv_u64m4_m(vbool16_t mask, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vx_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i64.nxv8i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vv_u64m8_m(vbool8_t mask, 
vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m8_m(mask, vs2, vs1, vl); +} +
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vrol_vx_u64m8_m +// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vrol.mask.nxv8i64.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]] +// +vuint64m8_t test_vrol_vx_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m8_m(mask, vs2, rs1, vl); +} +
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vror.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vror.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vror.c @@ -0,0 +1,895 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include <riscv_vector.h> +
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vv_u8mf8 +// CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]] +// +vuint8mf8_t test_vror_vv_u8mf8(vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vror_vv_u8mf8(vs2, vs1, vl); +} +
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vx_u8mf8 +// CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.nxv1i8.i64.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]] +// +vuint8mf8_t test_vror_vx_u8mf8(vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8mf8(vs2, rs1, vl); +} +
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vror_vv_u8mf4 +// CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vror.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], <vscale x 2 x i8> [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]] +// +vuint8mf4_t test_vror_vv_u8mf4(vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vror_vv_u8mf4(vs2, vs1, vl); +} +
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vror_vx_u8mf4 +// CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vror.nxv2i8.i64.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]] +// +vuint8mf4_t test_vror_vx_u8mf4(vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8mf4(vs2, rs1, vl); +} +
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8>
@test_vror_vv_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vv_u8mf2(vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vror_vv_u8mf2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vx_u8mf2(vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8mf2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vv_u8m1(vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vror_vv_u8m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vx_u8m1(vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vv_u8m2(vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vror_vv_u8m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vx_u8m2(vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vv_u8m4(vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vror_vv_u8m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vx_u8m4(vuint8m4_t vs2, size_t rs1, 
size_t vl) { + return __riscv_vror_vx_u8m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv64i8.nxv64i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vv_u8m8(vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vror_vv_u8m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv64i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vx_u8m8(vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vv_u16mf4(vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vror_vv_u16mf4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vx_u16mf4(vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16mf4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vv_u16mf2(vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vror_vv_u16mf2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vx_u16mf2(vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16mf2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vv_u16m1(vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vror_vv_u16m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vror.nxv4i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vx_u16m1(vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vv_u16m2(vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vror_vv_u16m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vx_u16m2(vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vv_u16m4(vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vror_vv_u16m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vx_u16m4(vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i16.nxv32i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vv_u16m8(vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vror_vv_u16m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vx_u16m8(vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vv_u32mf2(vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vror_vv_u32mf2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32mf2 +// 
CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vx_u32mf2(vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32mf2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vv_u32m1(vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vror_vv_u32m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vx_u32m1(vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vv_u32m2(vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vror_vv_u32m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vx_u32m2(vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vv_u32m4(vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vror_vv_u32m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vx_u32m4(vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i32.nxv16i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vv_u32m8(vuint32m8_t vs2, 
vuint32m8_t vs1, size_t vl) { + return __riscv_vror_vv_u32m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vx_u32m8(vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vv_u64m1(vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vror_vv_u64m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vx_u64m1(vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u64m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vv_u64m2(vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vror_vv_u64m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vx_u64m2(vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u64m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vv_u64m4(vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vror_vv_u64m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vx_u64m4(vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u64m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vror.nxv8i64.nxv8i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vv_u64m8(vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vror_vv_u64m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vx_u64m8(vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u64m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vror_vv_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vror_vv_u8mf8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vror_vx_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8mf8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vror_vv_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vror_vv_u8mf4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vror_vx_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8mf4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vv_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vror_vv_u8mf2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vx_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8mf2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vv_u8m1_m(vbool8_t mask, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vror_vv_u8m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vx_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vv_u8m2_m(vbool4_t mask, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vror_vv_u8m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vx_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vv_u8m4_m(vbool2_t mask, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vror_vv_u8m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vx_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv64i8.nxv64i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vv_u8m8_m(vbool1_t mask, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vror_vv_u8m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv64i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vx_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vv_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vror_vv_u16mf4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vx_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16mf4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vv_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vror_vv_u16mf2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vx_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16mf2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vv_u16m1_m(vbool16_t mask, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vror_vv_u16m1_m(mask, vs2, vs1, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vx_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vv_u16m2_m(vbool8_t mask, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vror_vv_u16m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vx_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vv_u16m4_m(vbool4_t mask, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vror_vv_u16m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vx_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i16.nxv32i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vv_u16m8_m(vbool2_t mask, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vror_vv_u16m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t 
test_vror_vx_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vv_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vror_vv_u32mf2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vx_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32mf2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vv_u32m1_m(vbool32_t mask, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vror_vv_u32m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vx_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vv_u32m2_m(vbool16_t mask, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vror_vv_u32m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vx_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vror.mask.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vv_u32m4_m(vbool8_t mask, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vror_vv_u32m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vx_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i32.nxv16i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vv_u32m8_m(vbool4_t mask, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vror_vv_u32m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vx_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vv_u64m1_m(vbool64_t mask, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vror_vv_u64m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vx_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u64m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vv_u64m2_m(vbool32_t mask, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vror_vv_u64m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], 
i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vx_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u64m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vv_u64m4_m(vbool16_t mask, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vror_vv_u64m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vx_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u64m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i64.nxv8i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vv_u64m8_m(vbool8_t mask, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vror_vv_u64m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vx_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u64m8_m(mask, vs2, rs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsha2ch.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsha2ch.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsha2ch.c @@ -0,0 +1,105 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// 
CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsha2ch_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u32mf2(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsha2ch_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u32m1(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsha2ch_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u32m2(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsha2ch_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u32m4(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsha2ch_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u32m8(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u64m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv1i64.nxv1i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vsha2ch_vv_u64m1(vuint64m1_t vd, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u64m1(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u64m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv2i64.nxv2i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vsha2ch_vv_u64m2(vuint64m2_t vd, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u64m2(vd, 
vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u64m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv4i64.nxv4i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vsha2ch_vv_u64m4(vuint64m4_t vd, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u64m4(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u64m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv8i64.nxv8i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vsha2ch_vv_u64m8(vuint64m8_t vd, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u64m8(vd, vs2, vs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsha2cl.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsha2cl.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsha2cl.c @@ -0,0 +1,105 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsha2cl_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u32mf2(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsha2cl_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u32m1(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsha2cl_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { 
+ return __riscv_vsha2cl_vv_u32m2(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsha2cl_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u32m4(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsha2cl_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u32m8(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u64m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv1i64.nxv1i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vsha2cl_vv_u64m1(vuint64m1_t vd, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u64m1(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u64m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv2i64.nxv2i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vsha2cl_vv_u64m2(vuint64m2_t vd, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u64m2(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u64m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv4i64.nxv4i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vsha2cl_vv_u64m4(vuint64m4_t vd, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u64m4(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u64m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv8i64.nxv8i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vsha2cl_vv_u64m8(vuint64m8_t vd, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u64m8(vd, vs2, vs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsha2ms.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsha2ms.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsha2ms.c @@ -0,0 +1,105 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: 
--version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsha2ms_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u32mf2(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsha2ms_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u32m1(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsha2ms_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u32m2(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsha2ms_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u32m4(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsha2ms_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u32m8(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u64m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv1i64.nxv1i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t 
+vuint64m1_t test_vsha2ms_vv_u64m1(vuint64m1_t vd, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vsha2ms_vv_u64m1(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vsha2ms_vv_u64m2
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VD:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vsha2ms.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VD]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vsha2ms_vv_u64m2(vuint64m2_t vd, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vsha2ms_vv_u64m2(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vsha2ms_vv_u64m4
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VD:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vsha2ms.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VD]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vsha2ms_vv_u64m4(vuint64m4_t vd, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vsha2ms_vv_u64m4(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vsha2ms_vv_u64m8
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VD:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vsha2ms.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VD]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vsha2ms_vv_u64m8(vuint64m8_t vd, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vsha2ms_vv_u64m8(vd, vs2, vs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsm3c.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsm3c.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsm3c.c
@@ -0,0 +1,65 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN: -target-feature +experimental-zvbb \
+// RUN: -target-feature +experimental-zvbc \
+// RUN: -target-feature +experimental-zvkg \
+// RUN: -target-feature +experimental-zvkned \
+// RUN: -target-feature +experimental-zvknhb \
+// RUN: -target-feature +experimental-zvksed \
+// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN: FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vsm3c_vi_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vsm3c.nxv1i32.i64.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 0, i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vsm3c_vi_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vsm3c_vi_u32mf2(vd, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vsm3c_vi_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vsm3c.nxv2i32.i64.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 0, i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vsm3c_vi_u32m1(vuint32m1_t vd,
vuint32m1_t vs2, size_t vl) { + return __riscv_vsm3c_vi_u32m1(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3c_vi_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3c.nxv4i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm3c_vi_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm3c_vi_u32m2(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3c_vi_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3c.nxv8i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm3c_vi_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm3c_vi_u32m4(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3c_vi_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3c.nxv16i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm3c_vi_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vsm3c_vi_u32m8(vd, vs2, 0, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsm3me.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsm3me.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsm3me.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm3me_vv_u32mf2(vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vsm3me_vv_u32mf2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm3me_vv_u32m1(vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vsm3me_vv_u32m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], 
[[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm3me_vv_u32m2(vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vsm3me_vv_u32m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm3me_vv_u32m4(vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vsm3me_vv_u32m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv16i32.nxv16i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm3me_vv_u32m8(vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vsm3me_vv_u32m8(vs2, vs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsm4k.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsm4k.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsm4k.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv1i32.i64.i64( poison, [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm4k_vi_u32mf2(vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4k_vi_u32mf2(vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv2i32.i64.i64( poison, [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4k_vi_u32m1(vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4k_vi_u32m1(vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv4i32.i64.i64( poison, [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4k_vi_u32m2(vuint32m2_t vs2, 
size_t vl) { + return __riscv_vsm4k_vi_u32m2(vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv8i32.i64.i64( poison, [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4k_vi_u32m4(vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4k_vi_u32m4(vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv16i32.i64.i64( poison, [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4k_vi_u32m8(vuint32m8_t vs2, size_t vl) { + return __riscv_vsm4k_vi_u32m8(vs2, 0, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsm4r.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsm4r.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vsm4r.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm4r_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vv_u32mf2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm4r_vs_u32mf2_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32mf2_u32mf2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4r_vs_u32mf2_u32m1(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32mf2_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: 
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vs_u32mf2_u32m2(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32mf2_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32mf2_u32m4(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32mf2_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32mf2_u32m8(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32mf2_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4r_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vv_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4r_vs_u32m1_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m1_u32m1(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vs_u32m1_u32m2(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m1_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32m1_u32m4(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m1_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m1_u32m8(vuint32m8_t vd, 
vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m1_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vv_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vs_u32m2_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m2_u32m2(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32m2_u32m4(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m2_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m2_u32m8(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m2_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4r_vv_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m4_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32m4_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m4_u32m4(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m4_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m4_u32m8(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m4_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vsm4r_vv_u32m8(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m8_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m8_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m8_u32m8(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vwsll.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vwsll.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vwsll.c @@ -0,0 +1,615 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i16.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vv_u16mf4(vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16mf4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i16.nxv1i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vx_u16mf4(vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16mf4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i16.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vv_u16mf2(vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16mf2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i16.nxv2i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vx_u16mf2(vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16mf2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i16.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vv_u16m1(vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i16.nxv4i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vx_u16m1(vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vv_u16m2(vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i16.nxv8i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vx_u16m2(vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vv_u16m4(vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i16.nxv16i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vx_u16m4(vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vv_u16m8(vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m8 +// CHECK-RV64-SAME: ( 
[[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv32i16.nxv32i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vx_u16m8(vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i32.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vv_u32mf2(vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32mf2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i32.nxv1i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vx_u32mf2(vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32mf2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i32.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vv_u32m1(vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i32.nxv2i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vx_u32m1(vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vv_u32m2(vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i32.nxv4i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vx_u32m2(vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vv_u32m4(vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i32.nxv8i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vx_u32m4(vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vv_u32m8(vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i32.nxv16i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vx_u32m8(vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i64.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vv_u64m1(vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m1(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i64.nxv1i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vx_u64m1(vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m1(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vv_u64m2(vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m2(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i64.nxv2i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vx_u64m2(vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m2(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m4 
+// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vv_u64m4(vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m4(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i64.nxv4i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vx_u64m4(vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m4(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vv_u64m8(vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m8(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i64.nxv8i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vx_u64m8(vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m8(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vv_u16mf4_m(vbool64_t mask, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16mf4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vx_u16mf4_m(vbool64_t mask, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16mf4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vv_u16mf2_m(vbool32_t mask, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16mf2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vx_u16mf2_m(vbool32_t mask, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16mf2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vv_u16m1_m(vbool16_t mask, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vx_u16m1_m(vbool16_t mask, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vv_u16m2_m(vbool8_t mask, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vx_u16m2_m(vbool8_t mask, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vv_u16m4_m(vbool4_t mask, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vx_u16m4_m(vbool4_t mask, 
vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vv_u16m8_m(vbool2_t mask, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vx_u16m8_m(vbool2_t mask, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vv_u32mf2_m(vbool64_t mask, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32mf2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vx_u32mf2_m(vbool64_t mask, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32mf2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vv_u32m1_m(vbool32_t mask, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m1_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vx_u32m1_m(vbool32_t mask, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vv_u32m2_m(vbool16_t mask, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vx_u32m2_m(vbool16_t mask, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vv_u32m4_m(vbool8_t mask, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vx_u32m4_m(vbool8_t mask, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vv_u32m8_m(vbool4_t mask, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vx_u32m8_m(vbool4_t mask, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m8_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vv_u64m1_m(vbool64_t mask, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m1_m(mask, vs2, vs1, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vx_u64m1_m(vbool64_t mask, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m1_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vv_u64m2_m(vbool32_t mask, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m2_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vx_u64m2_m(vbool32_t mask, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m2_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vv_u64m4_m(vbool16_t mask, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m4_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vx_u64m4_m(vbool16_t mask, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m4_m(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vv_u64m8_m(vbool8_t mask, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m8_m(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], 
i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vx_u64m8_m(vbool8_t mask, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m8_m(mask, vs2, rs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesdf.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesdf.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesdf.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdf_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdf_vs_u32mf2_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdf_vs_u32mf2_u32m1(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vs_u32mf2_u32m2(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32mf2_u32m4(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return 
__riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32mf2_u32m8(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdf_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdf_vs_u32m1_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vs_u32m1_u32m2(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32m1_u32m4(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m1_u32m8(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaesdf.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vs_u32m2_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32m2_u32m4(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m2_u32m8(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdf_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m4_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32m4_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m4_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m4_u32m8(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdf_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m8_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m8_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdf_vs(vd, vs2, vl); +} + diff --git 
a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesdm.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesdm.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesdm.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdm_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdm_vs_u32mf2_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdm_vs_u32mf2_u32m1(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vs_u32mf2_u32m2(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32mf2_u32m4(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32mf2_u32m8(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdm_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdm_vs_u32m1_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vs_u32m1_u32m2(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32m1_u32m4(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m1_u32m8(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vs_u32m2_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32m2_u32m4(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m2_u32m8(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdm_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m4_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32m4_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m4_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m4_u32m8(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdm_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m8_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m8_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdm_vs(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesef.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesef.c new file mode 100644 --- /dev/null +++ 
b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesef.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesef_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesef_vs_u32mf2_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesef_vs_u32mf2_u32m1(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vs_u32mf2_u32m2(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32mf2_u32m4(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32mf2_u32m8(vuint32m8_t vd, 
vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesef_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesef_vs_u32m1_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vs_u32m1_u32m2(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32m1_u32m4(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m1_u32m8(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vs_u32m2_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaesef.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32m2_u32m4(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m2_u32m8(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesef_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m4_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32m4_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m4_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m4_u32m8(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesef_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m8_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m8_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesef_vs(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesem.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesem.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesem.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature 
+v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesem_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32mf2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesem_vs_u32mf2_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesem_vs_u32mf2_u32m1(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vs_u32mf2_u32m2(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32mf2_u32m4(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32mf2_u32m8(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaesem.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesem_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesem_vs_u32m1_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vs_u32m1_u32m2(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32m1_u32m4(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m1_u32m8(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vs_u32m2_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32m2_u32m4(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vaesem_vs_u32m2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m2_u32m8(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesem_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m4_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32m4_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m4_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m4_u32m8(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesem_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m8_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m8_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesem_vs(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaeskf1.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaeskf1.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaeskf1.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: 
-target-feature +experimental-zvksed \
+// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN: FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vaeskf1_vi_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vaeskf1.nxv1i32.i64.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vaeskf1_vi_u32mf2(vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaeskf1(vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vaeskf1_vi_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vaeskf1.nxv2i32.i64.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vaeskf1_vi_u32m1(vuint32m1_t vs2, size_t vl) {
+  return __riscv_vaeskf1(vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vaeskf1_vi_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vaeskf1.nxv4i32.i64.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vaeskf1_vi_u32m2(vuint32m2_t vs2, size_t vl) {
+  return __riscv_vaeskf1(vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaeskf1_vi_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaeskf1.nxv8i32.i64.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaeskf1_vi_u32m4(vuint32m4_t vs2, size_t vl) {
+  return __riscv_vaeskf1(vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaeskf1_vi_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaeskf1.nxv16i32.i64.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaeskf1_vi_u32m8(vuint32m8_t vs2, size_t vl) {
+  return __riscv_vaeskf1(vs2, 0, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaeskf2.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaeskf2.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaeskf2.c
@@ -0,0 +1,65 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN: -target-feature +experimental-zvbb \
+// RUN: -target-feature +experimental-zvbc \
+// RUN: -target-feature +experimental-zvkg \
+// RUN: -target-feature +experimental-zvkned \
+// RUN: -target-feature +experimental-zvknhb \
+// RUN: -target-feature +experimental-zvksed \
+// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN: FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vaeskf2_vi_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vaeskf2.nxv1i32.i64.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 0, i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vaeskf2_vi_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaeskf2(vd, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vaeskf2_vi_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vaeskf2.nxv2i32.i64.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 0, i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vaeskf2_vi_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vaeskf2(vd, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vaeskf2_vi_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vaeskf2.nxv4i32.i64.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], i64 0, i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vaeskf2_vi_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vaeskf2(vd, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaeskf2_vi_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaeskf2.nxv8i32.i64.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], i64 0, i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaeskf2_vi_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vaeskf2(vd, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaeskf2_vi_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaeskf2.nxv16i32.i64.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], i64 0, i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaeskf2_vi_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vaeskf2(vd, vs2, 0, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesz.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesz.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaesz.c
@@ -0,0 +1,165 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN: -target-feature +experimental-zvbb \
+// RUN: -target-feature +experimental-zvbc \
+// RUN: -target-feature +experimental-zvkg \
+// RUN: -target-feature +experimental-zvkned \
+// RUN: -target-feature +experimental-zvknhb \
+// RUN: -target-feature +experimental-zvksed \
+// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN: FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vaesz_vs_u32mf2_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vaesz.vs.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vaesz_vs_u32mf2_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vaesz_vs_u32mf2_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vaesz.vs.nxv2i32.nxv1i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vaesz_vs_u32mf2_u32m1(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vaesz_vs_u32mf2_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vaesz.vs.nxv4i32.nxv1i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vaesz_vs_u32mf2_u32m2(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaesz_vs_u32mf2_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaesz.vs.nxv8i32.nxv1i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaesz_vs_u32mf2_u32m4(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaesz_vs_u32mf2_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesz.vs.nxv16i32.nxv1i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesz_vs_u32mf2_u32m8(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vaesz_vs_u32m1_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vaesz.vs.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vaesz_vs_u32m1_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vaesz_vs_u32m1_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vaesz.vs.nxv4i32.nxv2i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vaesz_vs_u32m1_u32m2(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaesz_vs_u32m1_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaesz.vs.nxv8i32.nxv2i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaesz_vs_u32m1_u32m4(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaesz_vs_u32m1_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesz.vs.nxv16i32.nxv2i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesz_vs_u32m1_u32m8(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vaesz_vs_u32m2_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vaesz.vs.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vaesz_vs_u32m2_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaesz_vs_u32m2_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaesz.vs.nxv8i32.nxv4i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaesz_vs_u32m2_u32m4(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaesz_vs_u32m2_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesz.vs.nxv16i32.nxv4i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesz_vs_u32m2_u32m8(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaesz_vs_u32m4_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaesz.vs.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaesz_vs_u32m4_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaesz_vs_u32m4_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesz.vs.nxv16i32.nxv8i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesz_vs_u32m4_u32m8(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaesz_vs_u32m8_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesz.vs.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesz_vs_u32m8_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vaesz(vd, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vandn.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vandn.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vandn.c
@@ -0,0 +1,895 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v
-target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vv_u8mf8(vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i8.i8.i64( poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vx_u8mf8(vuint8mf8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vv_u8mf4(vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i8.i8.i64( poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vx_u8mf4(vuint8mf4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vv_u8mf2(vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i8.i8.i64( poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vx_u8mf2(vuint8mf2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vv_u8m1(vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i8.i8.i64( poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vx_u8m1(vuint8m1_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vv_u8m2(vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i8.i8.i64( poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vx_u8m2(vuint8m2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vv_u8m4(vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i8.i8.i64( poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vx_u8m4(vuint8m4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv64i8.nxv64i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vv_u8m8(vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv64i8.i8.i64( poison, [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vx_u8m8(vuint8m8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vandn.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vv_u16mf4(vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i16.i16.i64( poison, [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vx_u16mf4(vuint16mf4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vv_u16mf2(vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i16.i16.i64( poison, [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vx_u16mf2(vuint16mf2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vv_u16m1(vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i16.i16.i64( poison, [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vx_u16m1(vuint16m1_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vv_u16m2(vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i16.i16.i64( poison, [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vx_u16m2(vuint16m2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vandn_vv_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vv_u16m4(vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i16.i16.i64( poison, [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vx_u16m4(vuint16m4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i16.nxv32i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vv_u16m8(vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i16.i16.i64( poison, [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vx_u16m8(vuint16m8_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vv_u32mf2(vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i32.i32.i64( poison, [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vx_u32mf2(vuint32mf2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vv_u32m1(vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i32.i32.i64( poison, [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// 
+vuint32m1_t test_vandn_vx_u32m1(vuint32m1_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vv_u32m2(vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i32.i32.i64( poison, [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vx_u32m2(vuint32m2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vv_u32m4(vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i32.i32.i64( poison, [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vx_u32m4(vuint32m4_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i32.nxv16i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vv_u32m8(vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i32.i32.i64( poison, [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vx_u32m8(vuint32m8_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vv_u64m1(vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vx_u64m1(vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vv_u64m2(vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vx_u64m2(vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vv_u64m4(vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vx_u64m4(vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i64.nxv8i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vv_u64m8(vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vandn(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vx_u64m8(vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vv_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vandn_vx_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vx_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vv_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vx_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vv_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vx_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vv_u8m1_m(vbool8_t mask, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vx_u8m1_m(vbool8_t mask, vuint8m1_t vs2, uint8_t rs1, size_t vl) { + 
return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vv_u8m2_m(vbool4_t mask, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vx_u8m2_m(vbool4_t mask, vuint8m2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vv_u8m4_m(vbool2_t mask, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vx_u8m4_m(vbool2_t mask, vuint8m4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.nxv64i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vv_u8m8_m(vbool1_t mask, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.i8.i64( poison, [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vx_u8m8_m(vbool1_t mask, vuint8m8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t 
test_vandn_vv_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.i16.i64( poison, [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vx_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vv_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.i16.i64( poison, [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vx_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vv_u16m1_m(vbool16_t mask, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.i16.i64( poison, [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vx_u16m1_m(vbool16_t mask, vuint16m1_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vv_u16m2_m(vbool8_t mask, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vandn.mask.nxv8i16.i16.i64( poison, [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vx_u16m2_m(vbool8_t mask, vuint16m2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vv_u16m4_m(vbool4_t mask, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.i16.i64( poison, [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vx_u16m4_m(vbool4_t mask, vuint16m4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.nxv32i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vv_u16m8_m(vbool2_t mask, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.i16.i64( poison, [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vx_u16m8_m(vbool2_t mask, vuint16m8_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vv_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.i32.i64( poison, [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vx_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], 
[[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vv_u32m1_m(vbool32_t mask, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.i32.i64( poison, [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vx_u32m1_m(vbool32_t mask, vuint32m1_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vv_u32m2_m(vbool16_t mask, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.i32.i64( poison, [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vx_u32m2_m(vbool16_t mask, vuint32m2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vv_u32m4_m(vbool8_t mask, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.i32.i64( poison, [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vx_u32m4_m(vbool8_t mask, vuint32m4_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.nxv16i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vv_u32m8_m(vbool4_t mask, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.i32.i64( poison, [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vx_u32m8_m(vbool4_t mask, vuint32m8_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vv_u64m1_m(vbool64_t mask, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vx_u64m1_m(vbool64_t mask, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vv_u64m2_m(vbool32_t mask, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vx_u64m2_m(vbool32_t mask, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vv_u64m4_m(vbool16_t mask, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vandn(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vx_u64m4_m(vbool16_t mask, 
vuint64m4_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vandn(mask, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vandn_vv_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vandn.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vandn_vv_u64m8_m(vbool8_t mask, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vandn(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vandn_vx_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vandn.mask.nxv8i64.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vandn_vx_u64m8_m(vbool8_t mask, vuint64m8_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vandn(mask, vs2, rs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vbrev.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vbrev.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vbrev.c
@@ -0,0 +1,455 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN: -target-feature +experimental-zvbb \
+// RUN: -target-feature +experimental-zvbc \
+// RUN: -target-feature +experimental-zvkg \
+// RUN: -target-feature +experimental-zvkned \
+// RUN: -target-feature +experimental-zvknhb \
+// RUN: -target-feature +experimental-zvksed \
+// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN: FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vbrev_v_u8mf8
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vbrev.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vbrev_v_u8mf8(vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vbrev(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vbrev_v_u8mf4
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vbrev.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vbrev_v_u8mf4(vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vbrev(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vbrev_v_u8mf2
+// CHECK-RV64-SAME: (<vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vbrev.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vbrev_v_u8mf2(vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vbrev(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vbrev_v_u8m1
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> 
@llvm.riscv.vbrev.nxv8i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev_v_u8m1(vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv16i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev_v_u8m2(vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv32i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev_v_u8m4(vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv64i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev_v_u8m8(vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev_v_u16mf4(vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv2i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev_v_u16mf2(vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv4i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev_v_u16m1(vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv8i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev_v_u16m2(vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv16i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev_v_u16m4(vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv32i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev_v_u16m8(vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev_v_u32mf2(vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv2i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev_v_u32m1(vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv4i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev_v_u32m2(vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv8i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev_v_u32m4(vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv16i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev_v_u32m8(vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev_v_u64m1(vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv2i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev_v_u64m2(vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv4i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev_v_u64m4(vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv8i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev_v_u64m8(vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev_v_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev_v_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev_v_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev_v_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev_v_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv32i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev_v_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv64i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev_v_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf4_m +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev_v_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev_v_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev_v_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev_v_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev_v_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv32i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev_v_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev_v_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev_v_u32m1_m(vbool32_t 
mask, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev_v_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev_v_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev_v_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev_v_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev_v_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev_v_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev_v_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev(mask, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vbrev8.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vbrev8.c new file mode 100644 --- /dev/null +++ 
b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vbrev8.c
@@ -0,0 +1,455 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN: -target-feature +experimental-zvbb \
+// RUN: -target-feature +experimental-zvbc \
+// RUN: -target-feature +experimental-zvkg \
+// RUN: -target-feature +experimental-zvkned \
+// RUN: -target-feature +experimental-zvknhb \
+// RUN: -target-feature +experimental-zvksed \
+// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN: FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vbrev8_v_u8mf8
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vbrev8.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vbrev8_v_u8mf8(vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vbrev8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vbrev8_v_u8mf4
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vbrev8.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vbrev8_v_u8mf4(vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vbrev8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vbrev8_v_u8mf2
+// CHECK-RV64-SAME: (<vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vbrev8.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vbrev8_v_u8mf2(vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vbrev8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vbrev8_v_u8m1
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vbrev8.nxv8i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vbrev8_v_u8m1(vuint8m1_t vs2, size_t vl) {
+  return __riscv_vbrev8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vbrev8_v_u8m2
+// CHECK-RV64-SAME: (<vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vbrev8.nxv16i8.i64(<vscale x 16 x i8> poison, <vscale x 16 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vbrev8_v_u8m2(vuint8m2_t vs2, size_t vl) {
+  return __riscv_vbrev8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vbrev8_v_u8m4
+// CHECK-RV64-SAME: (<vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vbrev8.nxv32i8.i64(<vscale x 32 x i8> poison, <vscale x 32 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vbrev8_v_u8m4(vuint8m4_t vs2, size_t vl) {
+  return __riscv_vbrev8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vbrev8_v_u8m8
+// CHECK-RV64-SAME: (<vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vbrev8.nxv64i8.i64(<vscale x 64 x i8> poison, <vscale x 64 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vbrev8_v_u8m8(vuint8m8_t vs2, 
size_t vl) { + return __riscv_vbrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv1i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev8_v_u16mf4(vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv2i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev8_v_u16mf2(vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv4i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev8_v_u16m1(vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv8i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev8_v_u16m2(vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv16i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev8_v_u16m4(vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv32i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev8_v_u16m8(vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv1i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev8_v_u32mf2(vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv2i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev8_v_u32m1(vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv4i32.i64( poison, [[VS2]], i64 [[VL]]) 
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vbrev8_v_u32m2(vuint32m2_t vs2, size_t vl) {
+  return __riscv_vbrev8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vbrev8_v_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vbrev8.nxv8i32.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vbrev8_v_u32m4(vuint32m4_t vs2, size_t vl) {
+  return __riscv_vbrev8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vbrev8_v_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vbrev8.nxv16i32.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vbrev8_v_u32m8(vuint32m8_t vs2, size_t vl) {
+  return __riscv_vbrev8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vbrev8_v_u64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vbrev8.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vbrev8_v_u64m1(vuint64m1_t vs2, size_t vl) {
+  return __riscv_vbrev8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vbrev8_v_u64m2
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vbrev8.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vbrev8_v_u64m2(vuint64m2_t vs2, size_t vl) {
+  return __riscv_vbrev8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vbrev8_v_u64m4
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vbrev8.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vbrev8_v_u64m4(vuint64m4_t vs2, size_t vl) {
+  return __riscv_vbrev8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vbrev8_v_u64m8
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vbrev8.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vbrev8_v_u64m8(vuint64m8_t vs2, size_t vl) {
+  return __riscv_vbrev8(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vbrev8_v_u8mf8_m
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vbrev8.mask.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vbrev8_v_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vbrev8_v_u8mf4_m
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vbrev8.mask.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vbrev8_v_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vbrev8_v_u8mf2_m
+// 
CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vbrev8.mask.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vbrev8_v_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vbrev8_v_u8m1_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vbrev8.mask.nxv8i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vbrev8_v_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vbrev8_v_u8m2_m
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vbrev8.mask.nxv16i8.i64(<vscale x 16 x i8> poison, <vscale x 16 x i8> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vbrev8_v_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vbrev8_v_u8m4_m
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vbrev8.mask.nxv32i8.i64(<vscale x 32 x i8> poison, <vscale x 32 x i8> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vbrev8_v_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vbrev8_v_u8m8_m
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vbrev8.mask.nxv64i8.i64(<vscale x 64 x i8> poison, <vscale x 64 x i8> [[VS2]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vbrev8_v_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vbrev8_v_u16mf4_m
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vbrev8.mask.nxv1i16.i64(<vscale x 1 x i16> poison, <vscale x 1 x i16> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vbrev8_v_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vbrev8_v_u16mf2_m
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vbrev8.mask.nxv2i16.i64(<vscale x 2 x i16> poison, <vscale x 2 x i16> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vbrev8_v_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vbrev8_v_u16m1_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vbrev8.mask.nxv4i16.i64(<vscale x 4 x i16> poison, <vscale x 4 x i16> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t 
test_vbrev8_v_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vbrev8_v_u16m2_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vbrev8.mask.nxv8i16.i64(<vscale x 8 x i16> poison, <vscale x 8 x i16> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vbrev8_v_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vbrev8_v_u16m4_m
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vbrev8.mask.nxv16i16.i64(<vscale x 16 x i16> poison, <vscale x 16 x i16> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vbrev8_v_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vbrev8_v_u16m8_m
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vbrev8.mask.nxv32i16.i64(<vscale x 32 x i16> poison, <vscale x 32 x i16> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vbrev8_v_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vbrev8_v_u32mf2_m
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vbrev8.mask.nxv1i32.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vbrev8_v_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vbrev8_v_u32m1_m
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vbrev8.mask.nxv2i32.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vbrev8_v_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vbrev8_v_u32m2_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vbrev8.mask.nxv4i32.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vbrev8_v_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vbrev8_v_u32m4_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vbrev8.mask.nxv8i32.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vbrev8_v_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vbrev8_v_u32m8_m
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// 
CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vbrev8.mask.nxv16i32.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vbrev8_v_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vbrev8_v_u64m1_m
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vbrev8.mask.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vbrev8_v_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vbrev8_v_u64m2_m
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vbrev8.mask.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vbrev8_v_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vbrev8_v_u64m4_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vbrev8.mask.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vbrev8_v_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vbrev8_v_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vbrev8.mask.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vbrev8_v_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vbrev8(mask, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vclmul.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vclmul.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vclmul.c
@@ -0,0 +1,175 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vv_u64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmul_vv_u64m1(vuint64m1_t vs2, 
vuint64m1_t vs1, size_t vl) {
+  return __riscv_vclmul(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vx_u64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.nxv1i64.i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmul_vx_u64m1(vuint64m1_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul(vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vv_u64m2
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmul_vv_u64m2(vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vclmul(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vx_u64m2
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.nxv2i64.i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmul_vx_u64m2(vuint64m2_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul(vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vv_u64m4
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclmul_vv_u64m4(vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vclmul(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vx_u64m4
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.nxv4i64.i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclmul_vx_u64m4(vuint64m4_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul(vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vv_u64m8
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmul_vv_u64m8(vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vclmul(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vx_u64m8
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.nxv8i64.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmul_vx_u64m8(vuint64m8_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul(vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vv_u64m1_m
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call 
<vscale x 1 x i64> @llvm.riscv.vclmul.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmul_vv_u64m1_m(vbool64_t mask, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vclmul(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vx_u64m1_m
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.mask.nxv1i64.i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmul_vx_u64m1_m(vbool64_t mask, vuint64m1_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul(mask, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vv_u64m2_m
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmul_vv_u64m2_m(vbool32_t mask, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vclmul(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vx_u64m2_m
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.mask.nxv2i64.i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmul_vx_u64m2_m(vbool32_t mask, vuint64m2_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul(mask, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vv_u64m4_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclmul_vv_u64m4_m(vbool16_t mask, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vclmul(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmul_vx_u64m4_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmul.mask.nxv4i64.i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclmul_vx_u64m4_m(vbool16_t mask, vuint64m4_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul(mask, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vv_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmul_vv_u64m8_m(vbool8_t mask, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vclmul(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmul_vx_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef 
[[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmul.mask.nxv8i64.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmul_vx_u64m8_m(vbool8_t mask, vuint64m8_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul(mask, vs2, rs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vclmulh.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vclmulh.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vclmulh.c
@@ -0,0 +1,175 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmulh_vv_u64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmulh.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmulh_vv_u64m1(vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vclmulh(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmulh_vx_u64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmulh.nxv1i64.i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmulh_vx_u64m1(vuint64m1_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh(vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmulh_vv_u64m2
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmulh.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmulh_vv_u64m2(vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vclmulh(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmulh_vx_u64m2
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmulh.nxv2i64.i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmulh_vx_u64m2(vuint64m2_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh(vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmulh_vv_u64m4
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmulh.nxv4i64.nxv4i64.i64(
<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclmulh_vv_u64m4(vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vclmulh(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmulh_vx_u64m4
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmulh.nxv4i64.i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclmulh_vx_u64m4(vuint64m4_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh(vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmulh_vv_u64m8
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmulh.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmulh_vv_u64m8(vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vclmulh(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmulh_vx_u64m8
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmulh.nxv8i64.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmulh_vx_u64m8(vuint64m8_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh(vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmulh_vv_u64m1_m
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmulh.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmulh_vv_u64m1_m(vbool64_t mask, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vclmulh(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmulh_vx_u64m1_m
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmulh.mask.nxv1i64.i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmulh_vx_u64m1_m(vbool64_t mask, vuint64m1_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh(mask, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmulh_vv_u64m2_m
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmulh.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmulh_vv_u64m2_m(vbool32_t mask, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vclmulh(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmulh_vx_u64m2_m
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmulh.mask.nxv2i64.i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret 
<vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmulh_vx_u64m2_m(vbool32_t mask, vuint64m2_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh(mask, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmulh_vv_u64m4_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmulh.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclmulh_vv_u64m4_m(vbool16_t mask, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vclmulh(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmulh_vx_u64m4_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmulh.mask.nxv4i64.i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclmulh_vx_u64m4_m(vbool16_t mask, vuint64m4_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh(mask, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmulh_vv_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmulh.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmulh_vv_u64m8_m(vbool8_t mask, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vclmulh(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmulh_vx_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmulh.mask.nxv8i64.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmulh_vx_u64m8_m(vbool8_t mask, vuint64m8_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh(mask, vs2, rs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vclz.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vclz.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vclz.c
@@ -0,0 +1,455 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vclz_v_u8mf8
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vclz.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], i64 [[VL]])
+// 
CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vclz_v_u8mf8(vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vclz_v_u8mf4
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vclz.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vclz_v_u8mf4(vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vclz_v_u8mf2
+// CHECK-RV64-SAME: (<vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vclz.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vclz_v_u8mf2(vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vclz_v_u8m1
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vclz.nxv8i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vclz_v_u8m1(vuint8m1_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vclz_v_u8m2
+// CHECK-RV64-SAME: (<vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vclz.nxv16i8.i64(<vscale x 16 x i8> poison, <vscale x 16 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vclz_v_u8m2(vuint8m2_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vclz_v_u8m4
+// CHECK-RV64-SAME: (<vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vclz.nxv32i8.i64(<vscale x 32 x i8> poison, <vscale x 32 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vclz_v_u8m4(vuint8m4_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vclz_v_u8m8
+// CHECK-RV64-SAME: (<vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vclz.nxv64i8.i64(<vscale x 64 x i8> poison, <vscale x 64 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vclz_v_u8m8(vuint8m8_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vclz_v_u16mf4
+// CHECK-RV64-SAME: (<vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vclz.nxv1i16.i64(<vscale x 1 x i16> poison, <vscale x 1 x i16> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vclz_v_u16mf4(vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vclz_v_u16mf2
+// CHECK-RV64-SAME: (<vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vclz.nxv2i16.i64(<vscale x 2 x i16> poison, <vscale x 2 x i16> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vclz_v_u16mf2(vuint16mf2_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vclz_v_u16m1
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vclz.nxv4i16.i64(<vscale x 4 x i16> poison, <vscale x 4 x i16> [[VS2]], i64 [[VL]])
+// 
CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vclz_v_u16m1(vuint16m1_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vclz_v_u16m2
+// CHECK-RV64-SAME: (<vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vclz.nxv8i16.i64(<vscale x 8 x i16> poison, <vscale x 8 x i16> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vclz_v_u16m2(vuint16m2_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vclz_v_u16m4
+// CHECK-RV64-SAME: (<vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vclz.nxv16i16.i64(<vscale x 16 x i16> poison, <vscale x 16 x i16> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vclz_v_u16m4(vuint16m4_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vclz_v_u16m8
+// CHECK-RV64-SAME: (<vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vclz.nxv32i16.i64(<vscale x 32 x i16> poison, <vscale x 32 x i16> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vclz_v_u16m8(vuint16m8_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vclz_v_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vclz.nxv1i32.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vclz_v_u32mf2(vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vclz_v_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vclz.nxv2i32.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vclz_v_u32m1(vuint32m1_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vclz_v_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vclz.nxv4i32.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vclz_v_u32m2(vuint32m2_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vclz_v_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vclz.nxv8i32.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vclz_v_u32m4(vuint32m4_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vclz_v_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vclz.nxv16i32.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vclz_v_u32m8(vuint32m8_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclz_v_u64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclz.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], i64 
[[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclz_v_u64m1(vuint64m1_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclz_v_u64m2
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclz.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclz_v_u64m2(vuint64m2_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclz_v_u64m4
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclz.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclz_v_u64m4(vuint64m4_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclz_v_u64m8
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclz.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclz_v_u64m8(vuint64m8_t vs2, size_t vl) {
+  return __riscv_vclz(vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vclz_v_u8mf8_m
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vclz.mask.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vclz_v_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vclz_v_u8mf4_m
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vclz.mask.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vclz_v_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vclz_v_u8mf2_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vclz.mask.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vclz_v_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vclz_v_u8m1_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vclz.mask.nxv8i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vclz_v_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vclz_v_u8m2_m
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vclz.mask.nxv16i8.i64(<vscale x 16 x i8> poison, <vscale x 16 x i8> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vclz_v_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t vl) 
{
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vclz_v_u8m4_m
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vclz.mask.nxv32i8.i64(<vscale x 32 x i8> poison, <vscale x 32 x i8> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vclz_v_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vclz_v_u8m8_m
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vclz.mask.nxv64i8.i64(<vscale x 64 x i8> poison, <vscale x 64 x i8> [[VS2]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vclz_v_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vclz_v_u16mf4_m
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vclz.mask.nxv1i16.i64(<vscale x 1 x i16> poison, <vscale x 1 x i16> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vclz_v_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vclz_v_u16mf2_m
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vclz.mask.nxv2i16.i64(<vscale x 2 x i16> poison, <vscale x 2 x i16> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vclz_v_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vclz_v_u16m1_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vclz.mask.nxv4i16.i64(<vscale x 4 x i16> poison, <vscale x 4 x i16> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vclz_v_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vclz_v_u16m2_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vclz.mask.nxv8i16.i64(<vscale x 8 x i16> poison, <vscale x 8 x i16> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vclz_v_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vclz_v_u16m4_m
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vclz.mask.nxv16i16.i64(<vscale x 16 x i16> poison, <vscale x 16 x i16> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vclz_v_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vclz_v_u16m8_m
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vclz.mask.nxv32i16.i64(<vscale x 32 x i16> poison, <vscale x 32 x i16> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 3)
+// 
CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vclz_v_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vclz_v_u32mf2_m
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vclz.mask.nxv1i32.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vclz_v_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vclz_v_u32m1_m
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vclz.mask.nxv2i32.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vclz_v_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vclz_v_u32m2_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vclz.mask.nxv4i32.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vclz_v_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vclz_v_u32m4_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vclz.mask.nxv8i32.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vclz_v_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vclz_v_u32m8_m
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vclz.mask.nxv16i32.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vclz_v_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclz_v_u64m1_m
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclz.mask.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclz_v_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclz_v_u64m2_m
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclz.mask.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclz_v_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclz_v_u64m4_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    
[[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclz.mask.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclz_v_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclz_v_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclz.mask.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclz_v_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vclz(mask, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vcpopv.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vcpopv.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vcpopv.c
@@ -0,0 +1,402 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -disable-O0-optnone -emit-llvm %s -o - | opt -S -passes=mem2reg | FileCheck %s
+
+#include <riscv_vector.h>
+
+// CHECK-LABEL: @test_vcpopv_v_u8mf8(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vcpopv.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vcpopv_v_u8mf8(vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8mf4(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vcpopv.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vcpopv_v_u8mf4(vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8mf2(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vcpopv.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vcpopv_v_u8mf2(vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m1(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vcpopv.nxv8i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vcpopv_v_u8m1(vuint8m1_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m2(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vcpopv.nxv16i8.i64(<vscale x 16 x i8> poison, <vscale x 16 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vcpopv_v_u8m2(vuint8m2_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m4(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vcpopv.nxv32i8.i64(<vscale x 32 x i8> poison, <vscale x 32 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vcpopv_v_u8m4(vuint8m4_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m8(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vcpopv.nxv64i8.i64(<vscale x 64 x i8> poison, <vscale x 64 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vcpopv_v_u8m8(vuint8m8_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u16mf4(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vcpopv.nxv1i16.i64(<vscale x 1 x i16> poison, 
<vscale x 1 x i16> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vcpopv_v_u16mf4(vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u16mf2(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vcpopv.nxv2i16.i64(<vscale x 2 x i16> poison, <vscale x 2 x i16> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vcpopv_v_u16mf2(vuint16mf2_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u16m1(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vcpopv.nxv4i16.i64(<vscale x 4 x i16> poison, <vscale x 4 x i16> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vcpopv_v_u16m1(vuint16m1_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u16m2(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vcpopv.nxv8i16.i64(<vscale x 8 x i16> poison, <vscale x 8 x i16> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vcpopv_v_u16m2(vuint16m2_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u16m4(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vcpopv.nxv16i16.i64(<vscale x 16 x i16> poison, <vscale x 16 x i16> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vcpopv_v_u16m4(vuint16m4_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u16m8(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vcpopv.nxv32i16.i64(<vscale x 32 x i16> poison, <vscale x 32 x i16> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vcpopv_v_u16m8(vuint16m8_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u32mf2(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vcpopv.nxv1i32.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vcpopv_v_u32mf2(vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u32m1(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vcpopv.nxv2i32.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vcpopv_v_u32m1(vuint32m1_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u32m2(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vcpopv.nxv4i32.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vcpopv_v_u32m2(vuint32m2_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u32m4(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vcpopv.nxv8i32.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vcpopv_v_u32m4(vuint32m4_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u32m8(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vcpopv.nxv16i32.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vcpopv_v_u32m8(vuint32m8_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u64m1(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vcpopv.nxv1i64.i64(<vscale x 1 x i64> poison, <vscale x 1 x i64> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vcpopv_v_u64m1(vuint64m1_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u64m2(
+// CHECK-NEXT:  
entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vcpopv.nxv2i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vcpopv_v_u64m2(vuint64m2_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u64m4(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vcpopv.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vcpopv_v_u64m4(vuint64m4_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u64m8(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vcpopv.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vcpopv_v_u64m8(vuint64m8_t vs2, size_t vl) {
+  return __riscv_vcpopv(vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8mf8_m(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vcpopv.mask.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
+// CHECK-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vcpopv_v_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vcpopv(mask, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8mf4_m(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vcpopv.mask.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
+// CHECK-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vcpopv_v_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vcpopv(mask, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8mf2_m(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vcpopv.mask.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
+// CHECK-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vcpopv_v_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vcpopv(mask, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m1_m(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vcpopv.mask.nxv8i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
+// CHECK-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vcpopv_v_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vcpopv(mask, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m2_m(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vcpopv.mask.nxv16i8.i64(<vscale x 16 x i8> poison, <vscale x 16 x i8> [[VS2:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
+// CHECK-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vcpopv_v_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t vl) {
+  return __riscv_vcpopv(mask, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m4_m(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vcpopv.mask.nxv32i8.i64(<vscale x 32 x i8> poison, <vscale x 32 x i8> [[VS2:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
+// CHECK-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vcpopv_v_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t vl) {
+  return __riscv_vcpopv(mask, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m8_m(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vcpopv.mask.nxv64i8.i64(<vscale x 64 x i8> poison, <vscale x 64 x i8> [[VS2:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
+// CHECK-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vcpopv_v_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t vl) {
+  return __riscv_vcpopv(mask, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u16mf4_m(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vcpopv.mask.nxv1i16.i64(<vscale x 1 x i16> poison, <vscale x 1 x i16> [[VS2:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 3)
+// CHECK-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t 
test_vcpopv_v_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf2_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i16.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vcpopv_v_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m1_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i16.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vcpopv_v_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m2_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i16.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vcpopv_v_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m4_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i16.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vcpopv_v_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m8_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i16.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vcpopv_v_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32mf2_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i32.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vcpopv_v_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m1_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i32.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vcpopv_v_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m2_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i32.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vcpopv_v_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m4_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i32.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vcpopv_v_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m8_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i32.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t 
test_vcpopv_v_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m1_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i64.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vcpopv_v_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m2_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i64.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vcpopv_v_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m4_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i64.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vcpopv_v_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m8_m( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i64.i64( poison, [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 3) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vcpopv_v_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t vl) { + return __riscv_vcpopv(mask, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vctz.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vctz.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vctz.c @@ -0,0 +1,455 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8mf8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv1i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vctz_v_u8mf8(vuint8mf8_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vctz_v_u8mf4(vuint8mf4_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vctz.nxv4i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vctz_v_u8mf2(vuint8mf2_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv8i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vctz_v_u8m1(vuint8m1_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv16i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vctz_v_u8m2(vuint8m2_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv32i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vctz_v_u8m4(vuint8m4_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv64i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vctz_v_u8m8(vuint8m8_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv1i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vctz_v_u16mf4(vuint16mf4_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vctz_v_u16mf2(vuint16mf2_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv4i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vctz_v_u16m1(vuint16m1_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv8i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vctz_v_u16m2(vuint16m2_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vctz.nxv16i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vctz_v_u16m4(vuint16m4_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv32i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vctz_v_u16m8(vuint16m8_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv1i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vctz_v_u32mf2(vuint32mf2_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vctz_v_u32m1(vuint32m1_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv4i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vctz_v_u32m2(vuint32m2_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv8i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vctz_v_u32m4(vuint32m4_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv16i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vctz_v_u32m8(vuint32m8_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv1i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vctz_v_u64m1(vuint64m1_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vctz_v_u64m2(vuint64m2_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] 
= call @llvm.riscv.vctz.nxv4i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vctz_v_u64m4(vuint64m4_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv8i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vctz_v_u64m8(vuint64m8_t vs2, size_t vl) { + return __riscv_vctz(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vctz_v_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vctz_v_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vctz_v_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vctz_v_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vctz_v_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv32i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vctz_v_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vctz.mask.nxv64i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vctz_v_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vctz_v_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vctz_v_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vctz_v_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vctz_v_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vctz_v_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv32i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vctz_v_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vctz_v_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vctz_v_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vctz_v_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vctz_v_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vctz_v_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vctz_v_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vctz_v_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vctz_v_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vctz_v_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vctz_v_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t vl) { + return __riscv_vctz(mask, vs2, vl); +} 
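For context on the overloads exercised by these tests: each __riscv_* intrinsic resolves on argument types alone, and the masked form is selected simply by passing the mask as the first argument. A minimal usage sketch of the unmasked and masked vctz overloads (hypothetical application code, not part of this patch; assumes a toolchain with the experimental Zvbb extension enabled):

    #include <riscv_vector.h>
    #include <stddef.h>

    // Hypothetical helper: count trailing zeros per element, then apply
    // the masked overload to the result. With no policy suffix, masked-off
    // and tail elements are agnostic, matching the poison passthru and the
    // trailing policy operand (i64 3) in the CHECK lines above.
    vuint32m1_t ctz_demo(vbool32_t mask, vuint32m1_t v, size_t vl) {
      vuint32m1_t t = __riscv_vctz(v, vl);  // unmasked overload
      return __riscv_vctz(mask, t, vl);     // masked overload
    }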
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vghsh.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vghsh.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vghsh.c
@@ -0,0 +1,65 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vghsh_vv_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vghsh.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vghsh_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) {
+  return __riscv_vghsh(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vghsh_vv_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vghsh.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vghsh_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) {
+  return __riscv_vghsh(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vghsh_vv_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vghsh.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vghsh_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) {
+  return __riscv_vghsh(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vghsh_vv_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vghsh.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vghsh_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
+  return __riscv_vghsh(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vghsh_vv_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vghsh.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vghsh_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) {
+  return __riscv_vghsh(vd, vs2, vs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vgmul.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vgmul.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vgmul.c
@@ -0,0 +1,65 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vgmul_vv_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vgmul.vv.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vgmul_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vgmul(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vgmul_vv_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vgmul.vv.nxv2i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vgmul_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vgmul(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vgmul_vv_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vgmul.vv.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vgmul_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vgmul(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vgmul_vv_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vgmul.vv.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vgmul_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vgmul(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vgmul_vv_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vgmul.vv.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vgmul_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vgmul(vd, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vrev8.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vrev8.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vrev8.c
@@ -0,0 +1,455
@@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv1i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrev8_v_u8mf8(vuint8mf8_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv2i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrev8_v_u8mf4(vuint8mf4_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv4i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrev8_v_u8mf2(vuint8mf2_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv8i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrev8_v_u8m1(vuint8m1_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv16i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrev8_v_u8m2(vuint8m2_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv32i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrev8_v_u8m4(vuint8m4_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv64i8.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrev8_v_u8m8(vuint8m8_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf4 +// CHECK-RV64-SAME: ( 
[[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv1i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrev8_v_u16mf4(vuint16mf4_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv2i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrev8_v_u16mf2(vuint16mf2_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv4i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrev8_v_u16m1(vuint16m1_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv8i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrev8_v_u16m2(vuint16m2_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv16i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrev8_v_u16m4(vuint16m4_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv32i16.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrev8_v_u16m8(vuint16m8_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv1i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrev8_v_u32mf2(vuint32mf2_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv2i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrev8_v_u32m1(vuint32m1_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv4i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrev8_v_u32m2(vuint32m2_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define 
dso_local @test_vrev8_v_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv8i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrev8_v_u32m4(vuint32m4_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv16i32.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrev8_v_u32m8(vuint32m8_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv1i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrev8_v_u64m1(vuint64m1_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv2i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrev8_v_u64m2(vuint64m2_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv4i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrev8_v_u64m4(vuint64m4_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv8i64.i64( poison, [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrev8_v_u64m8(vuint64m8_t vs2, size_t vl) { + return __riscv_vrev8(vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrev8_v_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrev8_v_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i8.i64( poison, [[VS2]], 
[[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrev8_v_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrev8_v_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrev8_v_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv32i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrev8_v_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv64i8.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrev8_v_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrev8_v_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrev8_v_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrev8_v_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 
+// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrev8_v_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrev8_v_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv32i16.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrev8_v_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrev8_v_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrev8_v_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrev8_v_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrev8_v_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i32.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrev8_v_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrev8_v_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrev8_v_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrev8_v_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i64.i64( poison, [[VS2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrev8_v_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t vl) { + return __riscv_vrev8(mask, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vrol.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vrol.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vrol.c @@ -0,0 +1,895 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vv_u8mf8(vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vrol.nxv1i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vx_u8mf8(vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vv_u8mf4(vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vx_u8mf4(vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vv_u8mf2(vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vx_u8mf2(vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vv_u8m1(vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vx_u8m1(vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vv_u8m2(vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: 
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vx_u8m2(vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vv_u8m4(vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vx_u8m4(vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv64i8.nxv64i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vv_u8m8(vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv64i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vx_u8m8(vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vv_u16mf4(vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vx_u16mf4(vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vv_u16mf2(vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vx_u16mf2(vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vv_u16m1(vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vx_u16m1(vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vv_u16m2(vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vx_u16m2(vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vv_u16m4(vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vx_u16m4(vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i16.nxv32i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vv_u16m8(vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vrol_vx_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vx_u16m8(vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vv_u32mf2(vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vx_u32mf2(vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vv_u32m1(vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vx_u32m1(vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vv_u32m2(vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vx_u32m2(vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vv_u32m4(vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + 
return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vx_u32m4(vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i32.nxv16i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vv_u32m8(vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vx_u32m8(vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vv_u64m1(vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vx_u64m1(vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vv_u64m2(vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vx_u64m2(vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t 
test_vrol_vv_u64m4(vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vx_u64m4(vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i64.nxv8i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vv_u64m8(vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vrol(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vx_u64m8(vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vv_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vx_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vv_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vx_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define 
dso_local @test_vrol_vv_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vv_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vx_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vv_u8m1_m(vbool8_t mask, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vx_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vv_u8m2_m(vbool4_t mask, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vx_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vv_u8m4_m(vbool2_t mask, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vx_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.nxv64i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vv_u8m8_m(vbool1_t mask, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vx_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vv_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vx_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vv_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vx_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t rs1, size_t vl) { 
+ return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vv_u16m1_m(vbool16_t mask, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vx_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vv_u16m2_m(vbool8_t mask, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vx_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vv_u16m4_m(vbool4_t mask, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vx_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.nxv32i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vv_u16m8_m(vbool2_t mask, 
vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vx_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vv_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vx_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vv_u32m1_m(vbool32_t mask, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vx_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vv_u32m2_m(vbool16_t mask, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: 
ret [[TMP0]] +// +vuint32m2_t test_vrol_vx_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vv_u32m4_m(vbool8_t mask, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vx_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.nxv16i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vv_u32m8_m(vbool4_t mask, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vx_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vv_u64m1_m(vbool64_t mask, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vrol(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vx_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], 
i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vrol_vv_u64m2_m(vbool32_t mask, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vrol(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vrol_vx_u64m2_m
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vrol.mask.nxv2i64.i64.i64(<vscale x 2 x i64> poison, <vscale x 2 x i64> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vrol_vx_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vrol(mask, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vrol_vv_u64m4_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vrol.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vrol_vv_u64m4_m(vbool16_t mask, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vrol(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vrol_vx_u64m4_m
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vrol.mask.nxv4i64.i64.i64(<vscale x 4 x i64> poison, <vscale x 4 x i64> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vrol_vx_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vrol(mask, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vrol_vv_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vrol.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vrol_vv_u64m8_m(vbool8_t mask, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vrol(mask, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vrol_vx_u64m8_m
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vrol.mask.nxv8i64.i64.i64(<vscale x 8 x i64> poison, <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vrol_vx_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vrol(mask, vs2, rs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vror.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vror.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vror.c
@@ -0,0 +1,895 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vv_u8mf8
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vror_vv_u8mf8(vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) {
+  return __riscv_vror(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vx_u8mf8
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.nxv1i8.i64.i64(<vscale x 1 x i8> poison, <vscale x 1 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vror_vx_u8mf8(vuint8mf8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror(vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vror_vv_u8mf4
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vror.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], <vscale x 2 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vror_vv_u8mf4(vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) {
+  return __riscv_vror(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vror_vx_u8mf4
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vror.nxv2i8.i64.i64(<vscale x 2 x i8> poison, <vscale x 2 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vror_vx_u8mf4(vuint8mf4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror(vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vror_vv_u8mf2
+// CHECK-RV64-SAME: (<vscale x 4 x i8> [[VS2:%.*]], <vscale x 4 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vror.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], <vscale x 4 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vror_vv_u8mf2(vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) {
+  return __riscv_vror(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vror_vx_u8mf2
+// CHECK-RV64-SAME: (<vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vror.nxv4i8.i64.i64(<vscale x 4 x i8> poison, <vscale x 4 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vror_vx_u8mf2(vuint8mf2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror(vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vror_vv_u8m1
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[VS2:%.*]], <vscale x 8 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vror.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> poison, <vscale x 8 x i8> [[VS2]], <vscale x 8 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vror_vv_u8m1(vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) {
+  return __riscv_vror(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vror_vx_u8m1
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+//
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vx_u8m1(vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vv_u8m2(vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vx_u8m2(vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vv_u8m4(vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vx_u8m4(vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv64i8.nxv64i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vv_u8m8(vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv64i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vx_u8m8(vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vv_u16mf4(vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vx_u16mf4(vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vv_u16mf2(vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vx_u16mf2(vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vv_u16m1(vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vx_u16m1(vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vv_u16m2(vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vx_u16m2(vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vv_u16m4(vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vror_vx_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vx_u16m4(vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i16.nxv32i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vv_u16m8(vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vx_u16m8(vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vv_u32mf2(vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vx_u32mf2(vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vv_u32m1(vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vx_u32m1(vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vv_u32m2(vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + 
return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vx_u32m2(vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vv_u32m4(vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vx_u32m4(vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i32.nxv16i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vv_u32m8(vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vx_u32m8(vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vv_u64m1(vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vx_u64m1(vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t 
test_vror_vv_u64m2(vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vx_u64m2(vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vv_u64m4(vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vx_u64m4(vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i64.nxv8i64.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vv_u64m8(vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vror(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i64.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vx_u64m8(vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vror_vv_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vror_vx_u8mf8_m(vbool64_t mask, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vror_vv_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vror_vx_u8mf4_m(vbool32_t mask, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vv_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vx_u8mf2_m(vbool16_t mask, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vv_u8m1_m(vbool8_t mask, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vx_u8m1_m(vbool8_t mask, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vv_u8m2_m(vbool4_t mask, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef 
[[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vx_u8m2_m(vbool4_t mask, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vv_u8m4_m(vbool2_t mask, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vx_u8m4_m(vbool2_t mask, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv64i8.nxv64i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vv_u8m8_m(vbool1_t mask, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv64i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vx_u8m8_m(vbool1_t mask, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vv_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vx_u16mf4_m(vbool64_t mask, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf2_m +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vv_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vx_u16mf2_m(vbool32_t mask, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vv_u16m1_m(vbool16_t mask, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vx_u16m1_m(vbool16_t mask, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vv_u16m2_m(vbool8_t mask, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vx_u16m2_m(vbool8_t mask, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vv_u16m4_m(vbool4_t mask, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define 
dso_local @test_vror_vx_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vx_u16m4_m(vbool4_t mask, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i16.nxv32i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vv_u16m8_m(vbool2_t mask, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vx_u16m8_m(vbool2_t mask, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vv_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vx_u32mf2_m(vbool64_t mask, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vv_u32m1_m(vbool32_t mask, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vx_u32m1_m(vbool32_t mask, vuint32m1_t vs2, size_t rs1, size_t vl) { + return 
__riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vv_u32m2_m(vbool16_t mask, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vx_u32m2_m(vbool16_t mask, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vv_u32m4_m(vbool8_t mask, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vx_u32m4_m(vbool8_t mask, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i32.nxv16i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vv_u32m8_m(vbool4_t mask, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vx_u32m8_m(vbool4_t mask, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i64.nxv1i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vv_u64m1_m(vbool64_t mask, vuint64m1_t 
vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vx_u64m1_m(vbool64_t mask, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i64.nxv2i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vv_u64m2_m(vbool32_t mask, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vx_u64m2_m(vbool32_t mask, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i64.nxv4i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vv_u64m4_m(vbool16_t mask, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vx_u64m4_m(vbool16_t mask, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i64.nxv8i64.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vv_u64m8_m(vbool8_t mask, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vror(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i64.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// 
+vuint64m8_t test_vror_vx_u64m8_m(vbool8_t mask, vuint64m8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror(mask, vs2, rs1, vl);
+}
+
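As a reference point for the vror tests above, the element-wise operation they pin down can be written as a plain scalar model. This sketch is not part of the patch; it assumes the Zvbb rule that the rotate amount is taken modulo SEW (here 64), which is why the tests never mask `rs1` themselves:

#include <stdint.h>

/* Scalar model of one vror.vx element at SEW=64: rotate right, with the
 * shift amount wrapped to the element width as Zvbb specifies. */
static inline uint64_t vror_ref_u64(uint64_t vs2, uint64_t rs1) {
  unsigned amt = rs1 & 63;                       /* amount wraps at SEW */
  return (vs2 >> amt) | (vs2 << ((64 - amt) & 63));  /* no UB when amt == 0 */
}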
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsha2ch.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsha2ch.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsha2ch.c
@@ -0,0 +1,105 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vsha2ch_vv_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vsha2ch.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vsha2ch_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) {
+  return __riscv_vsha2ch(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vsha2ch_vv_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vsha2ch.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vsha2ch_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) {
+  return __riscv_vsha2ch(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vsha2ch_vv_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vsha2ch.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vsha2ch_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) {
+  return __riscv_vsha2ch(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vsha2ch_vv_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vsha2ch.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vsha2ch_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
+  return __riscv_vsha2ch(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vsha2ch_vv_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vsha2ch.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vsha2ch_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) {
+  return __riscv_vsha2ch(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vsha2ch_vv_u64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VD:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vsha2ch.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VD]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vsha2ch_vv_u64m1(vuint64m1_t vd, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vsha2ch(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vsha2ch_vv_u64m2
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VD:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vsha2ch.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VD]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vsha2ch_vv_u64m2(vuint64m2_t vd, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vsha2ch(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vsha2ch_vv_u64m4
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VD:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vsha2ch.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VD]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vsha2ch_vv_u64m4(vuint64m4_t vd, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vsha2ch(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vsha2ch_vv_u64m8
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VD:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vsha2ch.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VD]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vsha2ch_vv_u64m8(vuint64m8_t vd, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vsha2ch(vd, vs2, vs1, vl);
+}
+
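The tests above only pin down the IR shape of vsha2ch; how it combines with vsha2cl and vsha2ms (next two files) is worth sketching. The following is illustrative, not from the patch: the names `abef`/`cdgh`/`msg_k` and the claim that `cdgh` is updated first mirror my reading of the Zvknh quad-round example (with {a,b,e,f} and {c,d,g,h} packed per element group), and should be checked against the spec before use:

/* Hypothetical quad-round step of SHA-256 at SEW=32.  Operand order
 * mirrors the vd/vs2/vs1 positions in the tests; msg_k holds the
 * pre-added message-schedule and round-constant words. */
vuint32m1_t sha256_quad_sketch(vuint32m1_t abef_in, vuint32m1_t cdgh_in,
                               vuint32m1_t msg_k, size_t vl) {
  vuint32m1_t cdgh = __riscv_vsha2cl(cdgh_in, abef_in, msg_k, vl); /* two rounds */
  vuint32m1_t abef = __riscv_vsha2ch(abef_in, cdgh, msg_k, vl);    /* two more  */
  /* a real loop would keep both halves and run __riscv_vsha2ms to
   * extend the message schedule; only abef is returned here. */
  return abef;
}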
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsha2cl.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsha2cl.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsha2cl.c
@@ -0,0 +1,105 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vsha2cl_vv_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vsha2cl.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vsha2cl_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) {
+  return __riscv_vsha2cl(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vsha2cl_vv_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vsha2cl.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vsha2cl_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) {
+  return __riscv_vsha2cl(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vsha2cl_vv_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vsha2cl.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vsha2cl_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) {
+  return __riscv_vsha2cl(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vsha2cl_vv_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vsha2cl.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vsha2cl_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
+  return __riscv_vsha2cl(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vsha2cl_vv_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vsha2cl.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vsha2cl_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) {
+  return __riscv_vsha2cl(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vsha2cl_vv_u64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VD:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vsha2cl.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VD]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vsha2cl_vv_u64m1(vuint64m1_t vd, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vsha2cl(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vsha2cl_vv_u64m2
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VD:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vsha2cl.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VD]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vsha2cl_vv_u64m2(vuint64m2_t vd, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vsha2cl(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vsha2cl_vv_u64m4
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VD:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vsha2cl.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VD]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vsha2cl_vv_u64m4(vuint64m4_t vd, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vsha2cl(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vsha2cl_vv_u64m8
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VD:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vsha2cl.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VD]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vsha2cl_vv_u64m8(vuint64m8_t vd, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vsha2cl(vd, vs2, vs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsha2ms.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsha2ms.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsha2ms.c
@@ -0,0 +1,105 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vsha2ms_vv_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vsha2ms.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vsha2ms_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) {
+  return __riscv_vsha2ms(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vsha2ms_vv_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vsha2ms.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vsha2ms_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) {
+  return __riscv_vsha2ms(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vsha2ms_vv_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vsha2ms.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vsha2ms_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) {
+  return __riscv_vsha2ms(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vsha2ms_vv_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vsha2ms.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vsha2ms_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
+  return __riscv_vsha2ms(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vsha2ms_vv_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vsha2ms.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vsha2ms_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) {
+  return __riscv_vsha2ms(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vsha2ms_vv_u64m1
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VD:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vsha2ms.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VD]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vsha2ms_vv_u64m1(vuint64m1_t vd, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vsha2ms(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vsha2ms_vv_u64m2
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VD:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vsha2ms.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VD]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vsha2ms_vv_u64m2(vuint64m2_t vd, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vsha2ms(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vsha2ms_vv_u64m4
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VD:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vsha2ms.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VD]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vsha2ms_vv_u64m4(vuint64m4_t vd, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vsha2ms(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vsha2ms_vv_u64m8
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VD:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vsha2ms.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VD]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vsha2ms_vv_u64m8(vuint64m8_t vd, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vsha2ms(vd, vs2, vs1, vl);
+}
+
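Behind the vsha2ms checks above sits the standard SHA-256 message schedule, which the instruction applies four words at a time per element group. A scalar rendering of the per-word math (well-known SHA-256 definitions, not taken from this patch; the helper names are mine):

#include <stdint.h>

static inline uint32_t ror32(uint32_t x, unsigned r) {
  return (x >> r) | (x << ((32 - r) & 31));
}
/* SHA-256 small sigma functions used by the message schedule. */
static inline uint32_t sigma0(uint32_t x) { return ror32(x, 7) ^ ror32(x, 18) ^ (x >> 3); }
static inline uint32_t sigma1(uint32_t x) { return ror32(x, 17) ^ ror32(x, 19) ^ (x >> 10); }
/* One expanded word w[i], i >= 16; vsha2ms produces four of these at once. */
static inline uint32_t sha256_w(const uint32_t *w, unsigned i) {
  return w[i - 16] + sigma0(w[i - 15]) + w[i - 7] + sigma1(w[i - 2]);
}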
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsm3c.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsm3c.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsm3c.c
@@ -0,0 +1,65 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vsm3c_vi_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vsm3c.nxv1i32.i64.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 0, i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vsm3c_vi_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vsm3c(vd, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vsm3c_vi_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vsm3c.nxv2i32.i64.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 0, i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vsm3c_vi_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vsm3c(vd, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vsm3c_vi_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vsm3c.nxv4i32.i64.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], i64 0, i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vsm3c_vi_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vsm3c(vd, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vsm3c_vi_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vsm3c.nxv8i32.i64.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], i64 0, i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vsm3c_vi_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vsm3c(vd, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vsm3c_vi_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vsm3c.nxv16i32.i64.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], i64 0, i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vsm3c_vi_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vsm3c(vd, vs2, 0, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsm3me.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsm3me.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsm3me.c
@@ -0,0 +1,65 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vsm3me_vv_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vsm3me.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vsm3me_vv_u32mf2(vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) {
+  return __riscv_vsm3me(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vsm3me_vv_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vsm3me.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vsm3me_vv_u32m1(vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) {
+  return __riscv_vsm3me(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vsm3me_vv_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vsm3me.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vsm3me_vv_u32m2(vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) {
+  return __riscv_vsm3me(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vsm3me_vv_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vsm3me.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vsm3me_vv_u32m4(vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
+  return __riscv_vsm3me(vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vsm3me_vv_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vsm3me.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vsm3me_vv_u32m8(vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) {
+  return __riscv_vsm3me(vs2, vs1, vl);
+}
+
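For the SM3 pair, vsm3me (tested above) performs message expansion while vsm3c (tested earlier) runs the compression rounds. The scalar expansion formula comes from the SM3 standard rather than from this patch, and the helper names below are mine:

#include <stdint.h>

static inline uint32_t rol32(uint32_t x, unsigned r) {
  return (x << r) | (x >> ((32 - r) & 31));
}
/* SM3 permutation P1 and message expansion; vsm3me vectorizes this,
 * eight expanded words per element group. */
static inline uint32_t sm3_p1(uint32_t x) { return x ^ rol32(x, 15) ^ rol32(x, 23); }
/* W[j] for j >= 16 per GB/T 32905-2016. */
static inline uint32_t sm3_w(const uint32_t *w, unsigned j) {
  return sm3_p1(w[j - 16] ^ w[j - 9] ^ rol32(w[j - 3], 15)) ^ rol32(w[j - 13], 7) ^ w[j - 6];
}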
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsm4k.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsm4k.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsm4k.c
@@ -0,0 +1,65 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vsm4k_vi_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vsm4k.nxv1i32.i64.i64(<vscale x 1 x i32> poison, <vscale x 1 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vsm4k_vi_u32mf2(vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vsm4k(vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vsm4k_vi_u32m1
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vsm4k.nxv2i32.i64.i64(<vscale x 2 x i32> poison, <vscale x 2 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vsm4k_vi_u32m1(vuint32m1_t vs2, size_t vl) {
+  return __riscv_vsm4k(vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vsm4k_vi_u32m2
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vsm4k.nxv4i32.i64.i64(<vscale x 4 x i32> poison, <vscale x 4 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vsm4k_vi_u32m2(vuint32m2_t vs2, size_t vl) {
+  return __riscv_vsm4k(vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vsm4k_vi_u32m4
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vsm4k.nxv8i32.i64.i64(<vscale x 8 x i32> poison, <vscale x 8 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vsm4k_vi_u32m4(vuint32m4_t vs2, size_t vl) {
+  return __riscv_vsm4k(vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vsm4k_vi_u32m8
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vsm4k.nxv16i32.i64.i64(<vscale x 16 x i32> poison, <vscale x 16 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vsm4k_vi_u32m8(vuint32m8_t vs2, size_t vl) {
+  return __riscv_vsm4k(vs2, 0, vl);
+}
+
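The vsm4k tests above compose with the vsm4r tests that follow: each vsm4k.vi step derives the next four SM4 round keys (the immediate selects the round group and must be a compile-time constant, which is why the tests pass a literal 0), and vsm4r.vv then runs four encryption rounds per element group. A hypothetical unrolled fragment, assuming `rk0` already holds the first four round keys (the key-schedule FK xor is omitted):

/* Sketch only; continue the pattern through round group 7 for 32 rounds. */
static vuint32m1_t sm4_rounds_sketch(vuint32m1_t state, vuint32m1_t rk0, size_t vl) {
  vuint32m1_t rk1 = __riscv_vsm4k(rk0, 0, vl);   /* round keys 4..7  */
  vuint32m1_t rk2 = __riscv_vsm4k(rk1, 1, vl);   /* round keys 8..11 */
  state = __riscv_vsm4r_vv(state, rk0, vl);      /* rounds 0..3  */
  state = __riscv_vsm4r_vv(state, rk1, vl);      /* rounds 4..7  */
  state = __riscv_vsm4r_vv(state, rk2, vl);      /* rounds 8..11 */
  return state;
}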
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsm4r.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsm4r.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vsm4r.c
@@ -0,0 +1,215 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vsm4r_vv_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vsm4r.vv.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vsm4r_vv_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vsm4r_vv(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vsm4r_vs_u32mf2_u32mf2
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vsm4r.vs.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vsm4r_vs_u32mf2_u32mf2(vuint32mf2_t vd, vuint32mf2_t vs2, size_t
vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4r_vs_u32mf2_u32m1(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vs_u32mf2_u32m2(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32mf2_u32m4(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32mf2_u32m8(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4r_vv_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m1 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4r_vs_u32m1_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vs_u32m1_u32m2(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vsm4r.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32m1_u32m4(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m1_u32m8(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vv_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m2_u32m2 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vs_u32m2_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m2_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32m2_u32m4(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m2_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m2_u32m8(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vv_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4r_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m4_u32m4 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32m4_u32m4(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m4_u32m8 +// 
CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m4_u32m8(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vv_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vsm4r_vv(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m8_u32m8 +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m8_u32m8(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vsm4r_vs(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vwsll.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vwsll.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vwsll.c @@ -0,0 +1,615 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i16.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vv_u16mf4(vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i16.nxv1i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vx_u16mf4(vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i16.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], 
i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vv_u16mf2(vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i16.nxv2i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vx_u16mf2(vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i16.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vv_u16m1(vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i16.nxv4i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vx_u16m1(vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vv_u16m2(vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i16.nxv8i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vx_u16m2(vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vv_u16m4(vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i16.nxv16i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vx_u16m4(vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vv_u16m8(vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv32i16.nxv32i8.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vx_u16m8(vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i32.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vv_u32mf2(vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32mf2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i32.nxv1i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vx_u32mf2(vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i32.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vv_u32m1(vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i32.nxv2i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vx_u32m1(vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vv_u32m2(vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i32.nxv4i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vx_u32m2(vuint16m1_t vs2, size_t rs1, size_t vl) { + 
return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vv_u32m4(vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i32.nxv8i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vx_u32m4(vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vv_u32m8(vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i32.nxv16i16.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vx_u32m8(vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i64.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vv_u64m1(vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m1 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i64.nxv1i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vx_u64m1(vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vv_u64m2(vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m2 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vwsll.nxv2i64.nxv2i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vx_u64m2(vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vv_u64m4(vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m4 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i64.nxv4i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vx_u64m4(vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vv_u64m8(vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vwsll(vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m8 +// CHECK-RV64-SAME: ( [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i64.nxv8i32.i64.i64( poison, [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vx_u64m8(vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.nxv1i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vv_u16mf4_m(vbool64_t mask, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vx_u16mf4_m(vbool64_t mask, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.nxv2i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t 
test_vwsll_vv_u16mf2_m(vbool32_t mask, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vx_u16mf2_m(vbool32_t mask, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.nxv4i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vv_u16m1_m(vbool16_t mask, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vx_u16m1_m(vbool16_t mask, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.nxv8i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vv_u16m2_m(vbool8_t mask, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vx_u16m2_m(vbool8_t mask, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.nxv16i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vv_u16m4_m(vbool4_t mask, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vx_u16m4_m(vbool4_t mask, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.nxv32i8.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vv_u16m8_m(vbool2_t mask, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vx_u16m8_m(vbool2_t mask, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.nxv1i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vv_u32mf2_m(vbool64_t mask, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32mf2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vx_u32mf2_m(vbool64_t mask, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.nxv2i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vv_u32m1_m(vbool32_t mask, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vx_u32m1_m(vbool32_t mask, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
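
// In these masked (_m) variants the trailing `i64 3` in the IR call is
// the policy operand: bit 0 set requests a tail-agnostic result and
// bit 1 set a mask-agnostic one, so 3 encodes "tama". That matches the
// plain _m intrinsics, which take no passthru and therefore leave tail
// and masked-off elements unspecified. A minimal sketch with a
// hypothetical helper name:

#include <riscv_vector.h>

// Shift only the lanes selected by `mask`; under the agnostic policy the
// unselected result lanes hold unspecified values, so don't rely on them.
static vuint16m2_t widen_shift_masked(vbool8_t mask, vuint8m1_t vs2,
                                      size_t vl) {
  return __riscv_vwsll_vx_u16m2_m(mask, vs2, 4, vl);
}
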
[[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.nxv4i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vv_u32m2_m(vbool16_t mask, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vx_u32m2_m(vbool16_t mask, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.nxv8i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vv_u32m4_m(vbool8_t mask, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vx_u32m4_m(vbool8_t mask, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.nxv16i16.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vv_u32m8_m(vbool4_t mask, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vx_u32m8_m(vbool4_t mask, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.nxv1i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vv_u64m1_m(vbool64_t mask, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return 
__riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vx_u64m1_m(vbool64_t mask, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.nxv2i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vv_u64m2_m(vbool32_t mask, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vx_u64m2_m(vbool32_t mask, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.nxv4i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vv_u64m4_m(vbool16_t mask, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vx_u64m4_m(vbool16_t mask, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.nxv8i32.i64( poison, [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vv_u64m8_m(vbool8_t mask, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vwsll(mask, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.i64.i64( poison, [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 3) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vx_u64m8_m(vbool8_t mask, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll(mask, vs2, rs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesdf.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesdf.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesdf.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdf_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vv_u32mf2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdf_vs_u32mf2_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32mf2_u32mf2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdf_vs_u32mf2_u32m1_tu(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32mf2_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vs_u32mf2_u32m2_tu(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32mf2_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t 
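
// Note the RUN lines of these vector-crypto tests: they enable +zvl512b
// alongside the experimental Zvk* features. The AES instructions operate
// on 128-bit element groups (EGW = 128), and the specification requires
// VLEN * LMUL >= EGW, so a fractional type such as vuint32mf2_t needs
// VLEN >= 256 to be usable at all; guaranteeing VLEN >= 512 comfortably
// keeps every variant in these files, including the u32mf2 ones, legal.
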
test_vaesdf_vs_u32mf2_u32m4_tu(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32mf2_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32mf2_u32m8_tu(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32mf2_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdf_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vv_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdf_vs_u32m1_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m1_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vs_u32m1_u32m2_tu(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m1_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32m1_u32m4_tu(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m1_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m1_u32m8_tu(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m1_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vv_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: 
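
// vaesdf (AES-128 final-round decryption) comes in two operand forms. In
// .vv, vd and vs2 have the same LMUL and each 128-bit element group of
// vs2 supplies the round key for the matching group of vd. In .vs,
// element group 0 of vs2 is the single round key applied to every group
// of vd, which is what produces the cross-LMUL test names such as
// _u32mf2_u32m8: a small round-key vector paired with equal or larger
// state. A minimal sketch of the final decryption round, with a
// hypothetical helper name and Zvkned assumed:

#include <riscv_vector.h>

// Apply the last AES-128 decryption round (InvShiftRows, InvSubBytes,
// AddRoundKey): the 128-bit key in group 0 of `rk` is used for all
// element groups of `state`. _tu keeps groups at or past vl undisturbed.
static vuint32m2_t aes128_dec_final(vuint32m2_t state, vuint32m1_t rk,
                                    size_t vl) {
  return __riscv_vaesdf_vs_u32m1_u32m2_tu(state, rk, vl);
}
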
define dso_local @test_vaesdf_vs_u32m2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vs_u32m2_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m2_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32m2_u32m4_tu(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m2_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m2_u32m8_tu(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m2_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdf_vv_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m4_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32m4_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m4_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m4_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m4_u32m8_tu(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m4_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdf_vv_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m8_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m8_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdf_vs_u32m8_u32m8_tu(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesdm.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesdm.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesdm.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdm_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vv_u32mf2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdm_vs_u32mf2_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32mf2_u32mf2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdm_vs_u32mf2_u32m1_tu(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32mf2_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vs_u32mf2_u32m2_tu(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32mf2_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaesdm.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32mf2_u32m4_tu(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32mf2_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32mf2_u32m8_tu(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32mf2_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdm_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vv_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdm_vs_u32m1_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m1_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vs_u32m1_u32m2_tu(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m1_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32m1_u32m4_tu(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m1_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m1_u32m8_tu(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m1_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t 
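
// vaesdm is the middle-round counterpart of vaesdf: it additionally
// performs InvMixColumns after AddRoundKey. Chaining the two yields a
// complete AES-128 block decryption. The sketch below is hypothetical
// application code: it assumes the caller has expanded the key schedule
// into rk[0..10] with each round key broadcast into every element group,
// and that vl is a multiple of 4 (one 128-bit group per block).

#include <riscv_vector.h>
#include <stddef.h>

static vuint32m1_t aes128_decrypt(vuint32m1_t ct, const vuint32m1_t rk[11],
                                  size_t vl) {
  // Initial AddRoundKey with the last round key.
  vuint32m1_t s = __riscv_vxor_vv_u32m1(ct, rk[10], vl);
  for (int r = 9; r >= 1; --r)
    s = __riscv_vaesdm_vv_u32m1_tu(s, rk[r], vl); // nine middle rounds
  return __riscv_vaesdf_vv_u32m1_tu(s, rk[0], vl); // final round
}
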
test_vaesdm_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vv_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vs_u32m2_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m2_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32m2_u32m4_tu(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m2_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m2_u32m8_tu(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m2_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdm_vv_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m4_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32m4_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m4_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m4_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m4_u32m8_tu(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m4_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdm_vv_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vaesdm_vs_u32m8_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m8_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdm_vs_u32m8_u32m8_tu(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesef.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesef.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesef.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesef_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vv_u32mf2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesef_vs_u32mf2_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32mf2_u32mf2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesef_vs_u32mf2_u32m1_tu(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32mf2_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vs_u32mf2_u32m2_tu(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32mf2_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], 
[[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32mf2_u32m4_tu(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32mf2_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32mf2_u32m8_tu(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32mf2_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesef_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vv_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesef_vs_u32m1_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m1_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vs_u32m1_u32m2_tu(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m1_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32m1_u32m4_tu(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m1_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m1_u32m8_tu(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m1_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaesef.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vv_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vs_u32m2_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m2_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32m2_u32m4_tu(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m2_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m2_u32m8_tu(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m2_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesef_vv_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m4_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32m4_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m4_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m4_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m4_u32m8_tu(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m4_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vv_u32m8_tu(vuint32m8_t vd, 
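
// Every unmasked vaes* check in these files ends with a policy operand of
// `i64 2`: the mask-agnostic bit is set (these instructions have no masked
// form, so the mask policy is irrelevant) while the tail-agnostic bit is
// clear, i.e. tail-undisturbed, matching the _tu suffix: element groups of
// vd at or beyond vl pass through to the result unchanged.
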
vuint32m8_t vs2, size_t vl) { + return __riscv_vaesef_vv_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m8_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m8_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesef_vs_u32m8_u32m8_tu(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesem.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesem.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesem.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesem_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vv_u32mf2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesem_vs_u32mf2_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32mf2_u32mf2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesem_vs_u32mf2_u32m1_tu(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32mf2_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vs_u32mf2_u32m2_tu(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32mf2_u32m2_tu(vd, vs2, 
vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32mf2_u32m4_tu(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32mf2_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32mf2_u32m8_tu(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32mf2_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesem_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vv_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesem_vs_u32m1_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m1_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vs_u32m1_u32m2_tu(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m1_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32m1_u32m4_tu(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m1_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m1_u32m8_tu(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m1_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vv_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vs_u32m2_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m2_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32m2_u32m4_tu(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m2_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m2_u32m8_tu(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m2_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesem_vv_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m4_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32m4_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m4_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m4_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m4_u32m8_tu(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesem_vs_u32m4_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) 
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesem_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vaesem_vv_u32m8_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaesem_vs_u32m8_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesem.vs.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesem_vs_u32m8_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vaesem_vs_u32m8_u32m8_tu(vd, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaeskf1.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaeskf1.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaeskf1.c
@@ -0,0 +1,65 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vaeskf1_vi_u32mf2_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[MASKEDOFF:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vaeskf1.nxv1i32.i64.i64(<vscale x 1 x i32> [[MASKEDOFF]], <vscale x 1 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vaeskf1_vi_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaeskf1_vi_u32mf2_tu(maskedoff, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vaeskf1_vi_u32m1_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vaeskf1.nxv2i32.i64.i64(<vscale x 2 x i32> [[MASKEDOFF]], <vscale x 2 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vaeskf1_vi_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vaeskf1_vi_u32m1_tu(maskedoff, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vaeskf1_vi_u32m2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vaeskf1.nxv4i32.i64.i64(<vscale x 4 x i32> [[MASKEDOFF]], <vscale x 4 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vaeskf1_vi_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vaeskf1_vi_u32m2_tu(maskedoff, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaeskf1_vi_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaeskf1.nxv8i32.i64.i64(<vscale x 8 x i32> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaeskf1_vi_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vaeskf1_vi_u32m4_tu(maskedoff, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaeskf1_vi_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaeskf1.nxv16i32.i64.i64(<vscale x 16 x i32> [[MASKEDOFF]], <vscale x 16 x i32> [[VS2]], i64 0, i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaeskf1_vi_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vaeskf1_vi_u32m8_tu(maskedoff, vs2, 0, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaeskf2.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaeskf2.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaeskf2.c
@@ -0,0 +1,65 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vaeskf2_vi_u32mf2_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vaeskf2.nxv1i32.i64.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 0, i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vaeskf2_vi_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaeskf2_vi_u32mf2_tu(vd, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vaeskf2_vi_u32m1_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vaeskf2.nxv2i32.i64.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 0, i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vaeskf2_vi_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vaeskf2_vi_u32m1_tu(vd, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vaeskf2_vi_u32m2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vaeskf2.nxv4i32.i64.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], i64 0, i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vaeskf2_vi_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vaeskf2_vi_u32m2_tu(vd, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaeskf2_vi_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaeskf2.nxv8i32.i64.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], i64 0, i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaeskf2_vi_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vaeskf2_vi_u32m4_tu(vd, vs2, 0, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaeskf2_vi_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaeskf2.nxv16i32.i64.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], i64 0, i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaeskf2_vi_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vaeskf2_vi_u32m8_tu(vd, vs2, 0, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesz.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesz.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaesz.c
@@ -0,0 +1,165 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vaesz_vs_u32mf2_u32mf2_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vaesz.vs.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vaesz_vs_u32mf2_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz_vs_u32mf2_u32mf2_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vaesz_vs_u32mf2_u32m1_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vaesz.vs.nxv2i32.nxv1i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vaesz_vs_u32mf2_u32m1_tu(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz_vs_u32mf2_u32m1_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vaesz_vs_u32mf2_u32m2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vaesz.vs.nxv4i32.nxv1i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vaesz_vs_u32mf2_u32m2_tu(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz_vs_u32mf2_u32m2_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaesz_vs_u32mf2_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaesz.vs.nxv8i32.nxv1i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaesz_vs_u32mf2_u32m4_tu(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz_vs_u32mf2_u32m4_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: 
define dso_local @test_vaesz_vs_u32mf2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesz_vs_u32mf2_u32m8_tu(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32mf2_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m1_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesz_vs_u32m1_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m1_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m1_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesz_vs_u32m1_u32m2_tu(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m1_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m1_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesz_vs_u32m1_u32m4_tu(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m1_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m1_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesz_vs_u32m1_u32m8_tu(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m1_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesz_vs_u32m2_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m2_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesz.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesz_vs_u32m2_u32m4_tu(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesz_vs_u32m2_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesz_vs_u32m2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// 
CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesz.vs.nxv16i32.nxv4i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesz_vs_u32m2_u32m8_tu(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vaesz_vs_u32m2_u32m8_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaesz_vs_u32m4_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaesz.vs.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaesz_vs_u32m4_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vaesz_vs_u32m4_u32m4_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaesz_vs_u32m4_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesz.vs.nxv16i32.nxv8i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesz_vs_u32m4_u32m8_tu(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vaesz_vs_u32m4_u32m8_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaesz_vs_u32m8_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesz.vs.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesz_vs_u32m8_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vaesz_vs_u32m8_u32m8_tu(vd, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vandn.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vandn.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vandn.c
@@ -0,0 +1,1775 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vandn_vv_u8mf8_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vandn.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vandn_vv_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) {
+  return __riscv_vandn_vv_u8mf8_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vandn_vx_u8mf8_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vx_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vv_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vx_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vv_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vx_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vv_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vx_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m1_tu(maskedoff, vs2, rs1, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vv_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vx_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vv_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vx_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vv_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv64i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vx_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// 
+vuint16mf4_t test_vandn_vv_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vandn_vv_u16mf4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vx_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16mf4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vv_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u16mf2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vx_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16mf2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vv_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vx_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m1_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vv_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], 
i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vx_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vv_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vx_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vv_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vx_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u32mf2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t 
test_vandn_vx_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32mf2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vx_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m1_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vv_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vx_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vv_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vx_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vv_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vx_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m1_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return 
__riscv_vandn_vv_u64m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vv_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vx_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vv_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vx_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vv_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vx_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vv_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m1_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vx_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m1_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vv_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m2_tum +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vx_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vv_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vx_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vv_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vx_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vv_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vandn_vv_u16mf4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf4_tum +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vx_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16mf4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vv_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u16mf2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vx_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16mf2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vv_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m1_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vx_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m1_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vv_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m2_tum(mask, maskedoff, vs2, vs1, vl); +} + 
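// A minimal usage sketch, not part of the autogenerated checks: the _tu tests
// above pin down the vandn builtin signatures, and the loop below shows how the
// tail-undisturbed vandn.vx form exercised in this file might be applied in
// user code to clear the bits of `mask` in each 32-bit word of a buffer.
// The function name and buffer parameters are hypothetical; building it needs
// the same experimental Zvbb feature flags as the RUN lines above.

#include <riscv_vector.h>
#include <stddef.h>
#include <stdint.h>

/* dst[i] = src[i] & ~mask for i < n, strip-mined by vsetvl. The passthru
   operand (vd) demonstrates the _tu policy: tail elements of the result
   vector carry dst's previous contents, though the store below only writes
   the first vl elements anyway. */
static void clear_bits_tu(uint32_t *dst, const uint32_t *src, uint32_t mask,
                          size_t n) {
  for (size_t i = 0; i < n;) {
    size_t vl = __riscv_vsetvl_e32m1(n - i);             /* elements this pass */
    vuint32m1_t vd = __riscv_vle32_v_u32m1(dst + i, vl); /* passthru values */
    vuint32m1_t vs2 = __riscv_vle32_v_u32m1(src + i, vl);
    /* vandn computes vs2 & ~rs1: the scalar operand is the negated one. */
    vuint32m1_t vr = __riscv_vandn_vx_u32m1_tu(vd, vs2, mask, vl);
    __riscv_vse32_v_u32m1(dst + i, vr, vl);
    i += vl;
  }
}
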
+// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vx_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vv_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vx_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vv_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vx_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vv_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return 
__riscv_vandn_vv_u32mf2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vx_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32mf2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vv_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m1_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m1_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vv_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vx_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vv_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, 
vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vx_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vv_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vx_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m1_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m1_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t 
test_vandn_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret 
[[TMP0]] +// +vuint8mf8_t test_vandn_vv_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vx_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vv_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vx_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vv_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vx_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], 
i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vv_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m1_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vx_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m1_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vv_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vx_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vv_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vx_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], 
[[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vv_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vx_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vv_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vandn_vv_u16mf4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vx_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16mf4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vv_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u16mf2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vx_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16mf2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vandn.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vv_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m1_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vx_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m1_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vv_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vx_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vv_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vx_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vv_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vx_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vv_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u32mf2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vx_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32mf2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m1_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m1_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vv_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vx_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vv_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vx_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vv_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vx_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define 
dso_local @test_vandn_vv_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m1_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m1_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m4_tumu(mask, maskedoff, vs2, 
rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vv_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vx_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf8_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vv_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vx_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, uint8_t rs1, size_t vl) { + return 
__riscv_vandn_vx_u8mf4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vv_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u8mf2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vx_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8mf2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vv_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vx_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vv_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vx_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, uint8_t rs1, size_t vl) { + return 
__riscv_vandn_vx_u8m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vv_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vx_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vv_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u8m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vx_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_vx_u8m8_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vv_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vandn_vv_u16mf4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vx_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, uint16_t rs1, size_t vl) { + 
return __riscv_vandn_vx_u16mf4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vv_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u16mf2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vx_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16mf2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vv_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vx_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vv_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vx_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, 
vuint16m2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vv_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vx_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vv_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u16m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vx_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_vx_u16m8_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vv_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vandn_vv_u32mf2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t 
test_vandn_vx_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32mf2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vv_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vv_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vx_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vv_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vx_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vv_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u32m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vx_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_vx_u32m8_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 
[[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vandn_vv_u64m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_vx_u64m8_mu(mask, maskedoff, vs2, rs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vbrev.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vbrev.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vbrev.c @@ -0,0 +1,895 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S 
-passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include <riscv_vector.h> + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev_v_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev_v_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf4_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev_v_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev_v_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m1_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev_v_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev_v_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m4_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev_v_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +//
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev_v_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev_v_u16mf4_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev_v_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u16mf2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev_v_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m1_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev_v_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev_v_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m4_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev_v_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev_v_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u32mf2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t 
test_vbrev_v_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m1_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev_v_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev_v_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m4_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev_v_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev_v_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m1_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev_v_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev_v_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m4_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev_v_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf8_tum +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev_v_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev_v_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev_v_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev_v_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev_v_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev_v_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev_v_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, 
vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev_v_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev_v_u16mf4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev_v_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u16mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev_v_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev_v_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev_v_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev_v_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev_v_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u32mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev_v_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev_v_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev_v_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev_v_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m2_tum(mask, maskedoff, vs2, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev_v_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev_v_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev_v_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev_v_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev_v_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev_v_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev_v_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev_v_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev_v_u16mf4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev_v_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u16mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev_v_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev_v_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev_v_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev_v_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev_v_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u32mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev_v_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev_v_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev_v_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t 
test_vbrev_v_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev_v_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev_v_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev_v_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 
+// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev_v_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u8mf2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev_v_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev_v_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev_v_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev_v_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u8m8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev_v_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev_v_u16mf4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev_v_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u16mf2_mu(mask, maskedoff, vs2, vl); +} + 
+// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev_v_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev_v_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev_v_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev_v_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u16m8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev_v_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev_v_u32mf2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev_v_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev_v_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev_v_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev_v_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u32m8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev_v_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.mask.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev_v_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev_v_u64m8_mu(mask, maskedoff, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vbrev8.c 
b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vbrev8.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vbrev8.c @@ -0,0 +1,895 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include <riscv_vector.h> + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev8_v_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev8_v_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf4_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev8_v_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev8_v_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m1_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev8_v_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call
@llvm.riscv.vbrev8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev8_v_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m4_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev8_v_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev8_v_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16mf4_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev8_v_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16mf2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev8_v_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m1_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev8_v_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev8_v_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m4_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev8_v_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t 
vl) { + return __riscv_vbrev8_v_u16m8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev8_v_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32mf2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev8_v_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m1_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev8_v_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev8_v_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m4_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev8_v_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev8_v_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m1_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev8_v_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m2_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev8_v_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m4_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev8_v_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev8_v_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev8_v_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev8_v_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev8_v_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev8_v_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: 
define dso_local @test_vbrev8_v_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev8_v_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev8_v_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev8_v_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16mf4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev8_v_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev8_v_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev8_v_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev8_v_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev8_v_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev8_v_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev8_v_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev8_v_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev8_v_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev8_v_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m1_tum +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev8_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev8_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev8_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev8_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev8_v_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev8_v_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t 
test_vbrev8_v_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev8_v_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev8_v_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev8_v_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev8_v_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev8_v_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16mf4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev8_v_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev8_v_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev8_v_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev8_v_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev8_v_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev8_v_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev8_v_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev8_v_u32m2_tumu(vbool16_t mask, 
vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev8_v_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev8_v_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev8_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev8_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev8_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev8_v_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 
+// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev8_v_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev8_v_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev8_v_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8mf2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev8_v_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev8_v_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev8_v_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev8_v_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u8m8_mu(mask, maskedoff, vs2, 
vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev8_v_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16mf4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev8_v_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16mf2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev8_v_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev8_v_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev8_v_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev8_v_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u16m8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], 
[[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev8_v_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32mf2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev8_v_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev8_v_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev8_v_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev8_v_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev8_v_u32m8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev8_v_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev8_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev8_v_u64m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m4_mu +// CHECK-RV64-SAME: 
(<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vbrev8.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vbrev8_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vbrev8_v_u64m4_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vbrev8_v_u64m8_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vbrev8.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vbrev8_v_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vbrev8_v_u64m8_mu(mask, maskedoff, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vclmul.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vclmul.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vclmul.c
@@ -0,0 +1,335 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN: -target-feature +experimental-zvbb \
+// RUN: -target-feature +experimental-zvbc \
+// RUN: -target-feature +experimental-zvkg \
+// RUN: -target-feature +experimental-zvkned \
+// RUN: -target-feature +experimental-zvknhb \
+// RUN: -target-feature +experimental-zvksed \
+// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN: FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vv_u64m1_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmul_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vclmul_vv_u64m1_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmul_vx_u64m1_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmul.nxv1i64.i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmul_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmul_vx_u64m1_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmul_vv_u64m2_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmul.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmul_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1,
size_t vl) { + return __riscv_vclmul_vv_u64m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m1_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m1_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m1_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m1_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vclmul_vx_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m2_mu(mask, maskedoff, vs2, vs1, 
vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmul_vv_u64m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_vx_u64m8_mu(mask, maskedoff, vs2, rs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vclmulh.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vclmulh.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vclmulh.c @@ -0,0 +1,335 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ 
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmulh_vv_u64m1_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmulh.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmulh_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vclmulh_vv_u64m1_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vclmulh_vx_u64m1_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vclmulh.nxv1i64.i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vclmulh_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh_vx_u64m1_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmulh_vv_u64m2_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmulh.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmulh_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vclmulh_vv_u64m2_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vclmulh_vx_u64m2_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclmulh.nxv2i64.i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclmulh_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh_vx_u64m2_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmulh_vv_u64m4_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmulh.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclmulh_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vclmulh_vv_u64m4_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vclmulh_vx_u64m4_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmulh.nxv4i64.i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64>
[[TMP0]] +// +vuint64m4_t test_vclmulh_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m1_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m1_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vx_u64m2_tum(vbool32_t mask, 
vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m1_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m1_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vclmulh.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_vx_u64m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmulh_vv_u64m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// 
CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclmulh.mask.nxv4i64.i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclmulh_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh_vx_u64m4_mu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmulh_vv_u64m8_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmulh.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmulh_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vclmulh_vv_u64m8_mu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmulh_vx_u64m8_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmulh.mask.nxv8i64.i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmulh_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh_vx_u64m8_mu(mask, maskedoff, vs2, rs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vclz.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vclz.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vclz.c
@@ -0,0 +1,798 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -disable-O0-optnone -emit-llvm %s -o - | opt -S -passes=mem2reg | FileCheck %s
+
+#include <riscv_vector.h>
+
+// CHECK-LABEL: @test_vclz_v_u8mf8_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vclz.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vclz_v_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vclz_v_u8mf8_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vclz_v_u8mf4_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vclz.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vclz_v_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vclz_v_u8mf4_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vclz_v_u8mf2_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vclz.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vclz_v_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vclz_v_u8mf2_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vclz_v_u8m1_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vclz.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vclz_v_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+
return __riscv_vclz_v_u8m1_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vclz_v_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vclz_v_u8m2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vclz_v_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vclz_v_u8m4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vclz_v_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vclz_v_u8m8_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vclz_v_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vclz_v_u16mf4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vclz_v_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u16mf2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vclz_v_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vclz_v_u16m1_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vclz_v_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vclz_v_u16m2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vclz_v_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vclz_v_u16m4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vclz_v_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vclz_v_u16m8_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32mf2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vclz_v_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t 
vl) { + return __riscv_vclz_v_u32mf2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vclz_v_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vclz_v_u32m1_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vclz_v_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vclz_v_u32m2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vclz_v_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vclz_v_u32m4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vclz_v_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vclz_v_u32m8_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclz_v_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vclz_v_u64m1_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclz_v_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vclz_v_u64m2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclz_v_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vclz_v_u64m4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclz_v_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vclz_v_u64m8_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vclz_v_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vclz_v_u8mf8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] 
+// +vuint8mf4_t test_vclz_v_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vclz_v_u8mf4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vclz_v_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u8mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vclz_v_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vclz_v_u8m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vclz_v_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vclz_v_u8m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vclz_v_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vclz_v_u8m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vclz_v_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vclz_v_u8m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vclz_v_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vclz_v_u16mf4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vclz_v_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u16mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vclz_v_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vclz_v_u16m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i16.i64( 
[[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vclz_v_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vclz_v_u16m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vclz_v_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vclz_v_u16m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vclz_v_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vclz_v_u16m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vclz_v_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u32mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vclz_v_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vclz_v_u32m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vclz_v_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vclz_v_u32m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vclz_v_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vclz_v_u32m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vclz_v_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vclz_v_u32m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclz_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vclz_v_u64m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: 
@test_vclz_v_u64m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclz_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vclz_v_u64m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclz_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vclz_v_u64m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclz_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vclz_v_u64m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vclz_v_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vclz_v_u8mf8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vclz_v_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vclz_v_u8mf4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vclz_v_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u8mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vclz_v_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vclz_v_u8m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vclz_v_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vclz_v_u8m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vclz_v_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t 
vs2, size_t vl) { + return __riscv_vclz_v_u8m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vclz_v_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vclz_v_u8m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vclz_v_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vclz_v_u16mf4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vclz_v_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u16mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vclz_v_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vclz_v_u16m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vclz_v_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vclz_v_u16m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vclz_v_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vclz_v_u16m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vclz_v_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vclz_v_u16m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vclz_v_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u32mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], 
[[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vclz_v_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vclz_v_u32m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vclz_v_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vclz_v_u32m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vclz_v_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vclz_v_u32m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vclz_v_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vclz_v_u32m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclz_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vclz_v_u64m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclz_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vclz_v_u64m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclz_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vclz_v_u64m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclz_v_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vclz_v_u64m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vclz_v_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vclz_v_u8mf8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf4_mu( 
+// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vclz_v_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vclz_v_u8mf4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vclz_v_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u8mf2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vclz_v_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vclz_v_u8m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vclz_v_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vclz_v_u8m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vclz_v_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vclz_v_u8m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vclz_v_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vclz_v_u8m8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vclz_v_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vclz_v_u16mf4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vclz_v_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u16mf2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vclz_v_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vclz_v_u16m1_mu(mask, 
maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vclz_v_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vclz_v_u16m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vclz_v_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vclz_v_u16m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vclz_v_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vclz_v_u16m8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vclz_v_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vclz_v_u32mf2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vclz_v_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vclz_v_u32m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vclz_v_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vclz_v_u32m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vclz_v_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vclz_v_u32m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vclz_v_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vclz_v_u32m8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclz_v_u64m1_mu(vbool64_t mask, 
vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) {
+  return __riscv_vclz_v_u64m1_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vclz_v_u64m2_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vclz.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vclz_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) {
+  return __riscv_vclz_v_u64m2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vclz_v_u64m4_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vclz.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vclz_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vclz_v_u64m4_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vclz_v_u64m8_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclz.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclz_v_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vclz_v_u64m8_mu(mask, maskedoff, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vcpopv.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vcpopv.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vcpopv.c
@@ -0,0 +1,798 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -disable-O0-optnone -emit-llvm %s -o - | opt -S -passes=mem2reg | FileCheck %s
+
+#include <riscv_vector.h>
+
+// CHECK-LABEL: @test_vcpopv_v_u8mf8_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vcpopv.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vcpopv_v_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vcpopv_v_u8mf8_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8mf4_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vcpopv.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vcpopv_v_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vcpopv_v_u8mf4_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8mf2_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vcpopv.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vcpopv_v_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vcpopv_v_u8mf2_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m1_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vcpopv.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vcpopv_v_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vcpopv_v_u8m1_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m2_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vcpopv.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+//
CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vcpopv_v_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vcpopv_v_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vcpopv_v_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m8_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vcpopv_v_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16mf4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vcpopv_v_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16mf2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vcpopv_v_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m1_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vcpopv_v_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vcpopv_v_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vcpopv_v_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m8_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32mf2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vcpopv_v_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32mf2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vcpopv.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vcpopv_v_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m1_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vcpopv_v_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vcpopv_v_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vcpopv_v_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m8_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vcpopv_v_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m1_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vcpopv_v_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vcpopv_v_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vcpopv_v_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m8_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vcpopv_v_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vcpopv_v_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, 
vuint8mf4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vcpopv_v_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vcpopv_v_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vcpopv_v_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vcpopv_v_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vcpopv_v_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vcpopv_v_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16mf4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vcpopv_v_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vcpopv_v_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i16.i64( 
[[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vcpopv_v_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vcpopv_v_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vcpopv_v_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vcpopv_v_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vcpopv_v_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vcpopv_v_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vcpopv_v_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vcpopv_v_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vcpopv_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return 
__riscv_vcpopv_v_u64m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vcpopv_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vcpopv_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vcpopv_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vcpopv_v_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vcpopv_v_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vcpopv_v_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vcpopv_v_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vcpopv_v_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], 
[[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vcpopv_v_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vcpopv_v_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vcpopv_v_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16mf4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vcpopv_v_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vcpopv_v_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vcpopv_v_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vcpopv_v_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vcpopv_v_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vcpopv_v_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return 
__riscv_vcpopv_v_u32mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vcpopv_v_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vcpopv_v_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vcpopv_v_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vcpopv_v_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vcpopv_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vcpopv_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vcpopv_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vcpopv_v_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i8.i64( 
[[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vcpopv_v_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vcpopv_v_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vcpopv_v_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8mf2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vcpopv_v_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vcpopv_v_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vcpopv_v_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vcpopv_v_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u8m8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vcpopv_v_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16mf4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vcpopv_v_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16mf2_mu(mask, maskedoff, vs2, vl); +} + +// 
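// A note on the policy encoding exercised by these tests: the trailing i64
// immediate in the masked CHECK lines above is a policy bitmask with
// TAIL_AGNOSTIC = 1 and MASK_AGNOSTIC = 2, so the _tum tests expect `i64 2`,
// the _tumu tests expect `i64 0`, and the _mu tests expect `i64 1`. The
// unmasked _tu variants carry no policy operand, since passing `maskedoff`
// already implies a tail-undisturbed policy. A minimal usage sketch
// (illustrative only, not part of the autogenerated checks):
//
//   // Tail lanes keep maskedoff; masked-off lanes are agnostic, so the
//   // call lowers with the policy immediate `i64 2`.
//   vuint8m1_t r = __riscv_vcpopv_v_u8m1_tum(mask, maskedoff, vs2, vl);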
CHECK-LABEL: @test_vcpopv_v_u16m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vcpopv_v_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vcpopv_v_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vcpopv_v_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vcpopv_v_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u16m8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vcpopv_v_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32mf2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vcpopv_v_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vcpopv_v_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vcpopv_v_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t 
test_vcpopv_v_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u32m8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vcpopv_v_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vcpopv_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vcpopv_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vcpopv_v_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vcpopv_v_u64m8_mu(mask, maskedoff, vs2, vl); +} +
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vctz.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vctz.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vctz.c
@@ -0,0 +1,798 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +experimental-zvbb -disable-O0-optnone -emit-llvm %s -o - | opt -S -passes=mem2reg | FileCheck %s
+
+#include <riscv_vector.h>
+
+// CHECK-LABEL: @test_vctz_v_u8mf8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vctz_v_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf8_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vctz_v_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vctz_v_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vctz.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vctz_v_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vctz_v_u8m1_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vctz_v_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vctz_v_u8m2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vctz_v_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vctz_v_u8m4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vctz_v_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vctz_v_u8m8_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16mf4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vctz_v_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vctz_v_u16mf4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16mf2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vctz_v_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u16mf2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vctz_v_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vctz_v_u16m1_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vctz_v_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vctz_v_u16m2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vctz_v_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vctz_v_u16m4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vctz_v_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vctz_v_u16m8_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32mf2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vctz.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vctz_v_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u32mf2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vctz_v_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vctz_v_u32m1_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vctz_v_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vctz_v_u32m2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vctz_v_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vctz_v_u32m4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vctz_v_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vctz_v_u32m8_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vctz_v_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vctz_v_u64m1_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vctz_v_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vctz_v_u64m2_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vctz_v_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vctz_v_u64m4_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vctz_v_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vctz_v_u64m8_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vctz_v_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: 
@test_vctz_v_u8mf4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vctz_v_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vctz_v_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vctz_v_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vctz_v_u8m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vctz_v_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vctz_v_u8m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vctz_v_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vctz_v_u8m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vctz_v_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vctz_v_u8m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16mf4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vctz_v_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vctz_v_u16mf4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vctz_v_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u16mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vctz_v_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t 
vl) { + return __riscv_vctz_v_u16m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vctz_v_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vctz_v_u16m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vctz_v_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vctz_v_u16m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vctz_v_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vctz_v_u16m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vctz_v_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u32mf2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vctz_v_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vctz_v_u32m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vctz_v_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vctz_v_u32m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vctz_v_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vctz_v_u32m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vctz_v_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vctz_v_u32m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: 
ret [[TMP0]] +// +vuint64m1_t test_vctz_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vctz_v_u64m1_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vctz_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vctz_v_u64m2_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vctz_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vctz_v_u64m4_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vctz_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vctz_v_u64m8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vctz_v_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vctz_v_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vctz_v_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u8mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vctz_v_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vctz_v_u8m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vctz_v_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vctz_v_u8m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vctz.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vctz_v_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vctz_v_u8m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vctz_v_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vctz_v_u8m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16mf4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vctz_v_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vctz_v_u16mf4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vctz_v_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vctz_v_u16mf2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vctz_v_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vctz_v_u16m1_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vctz_v_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vctz_v_u16m2_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vctz_v_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vctz_v_u16m4_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vctz_v_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vctz_v_u16m8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vctz_v_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return 
__riscv_vctz_v_u32mf2_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u32m1_tumu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vctz.mask.nxv2i32.i64(<vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 0)
+// CHECK-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vctz_v_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vctz_v_u32m1_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u32m2_tumu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vctz.mask.nxv4i32.i64(<vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 0)
+// CHECK-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vctz_v_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vctz_v_u32m2_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u32m4_tumu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vctz.mask.nxv8i32.i64(<vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 0)
+// CHECK-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vctz_v_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vctz_v_u32m4_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u32m8_tumu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vctz.mask.nxv16i32.i64(<vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 0)
+// CHECK-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vctz_v_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vctz_v_u32m8_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u64m1_tumu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vctz.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 0)
+// CHECK-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vctz_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) {
+  return __riscv_vctz_v_u64m1_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u64m2_tumu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vctz.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 0)
+// CHECK-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vctz_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) {
+  return __riscv_vctz_v_u64m2_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u64m4_tumu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vctz.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 0)
+// CHECK-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vctz_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vctz_v_u64m4_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u64m8_tumu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vctz.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 0)
+// CHECK-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vctz_v_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vctz_v_u64m8_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u8mf8_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vctz.mask.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vctz_v_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vctz_v_u8mf8_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u8mf4_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vctz.mask.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vctz_v_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vctz_v_u8mf4_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u8mf2_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vctz.mask.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vctz_v_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vctz_v_u8mf2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u8m1_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vctz.mask.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vctz_v_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vctz_v_u8m1_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u8m2_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vctz.mask.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vctz_v_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) {
+  return __riscv_vctz_v_u8m2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u8m4_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vctz.mask.nxv32i8.i64(<vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vctz_v_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) {
+  return __riscv_vctz_v_u8m4_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u8m8_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vctz.mask.nxv64i8.i64(<vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vctz_v_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) {
+  return __riscv_vctz_v_u8m8_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u16mf4_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vctz.mask.nxv1i16.i64(<vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vctz_v_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vctz_v_u16mf4_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u16mf2_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vctz.mask.nxv2i16.i64(<vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i16> [[VS2:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vctz_v_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) {
+  return __riscv_vctz_v_u16mf2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u16m1_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vctz.mask.nxv4i16.i64(<vscale x 4 x i16> [[MASKEDOFF:%.*]], <vscale x 4 x i16> [[VS2:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vctz_v_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) {
+  return __riscv_vctz_v_u16m1_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u16m2_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vctz.mask.nxv8i16.i64(<vscale x 8 x i16> [[MASKEDOFF:%.*]], <vscale x 8 x i16> [[VS2:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vctz_v_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) {
+  return __riscv_vctz_v_u16m2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u16m4_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vctz.mask.nxv16i16.i64(<vscale x 16 x i16> [[MASKEDOFF:%.*]], <vscale x 16 x i16> [[VS2:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vctz_v_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) {
+  return __riscv_vctz_v_u16m4_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u16m8_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vctz.mask.nxv32i16.i64(<vscale x 32 x i16> [[MASKEDOFF:%.*]], <vscale x 32 x i16> [[VS2:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vctz_v_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) {
+  return __riscv_vctz_v_u16m8_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u32mf2_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vctz.mask.nxv1i32.i64(<vscale x 1 x i32> [[MASKEDOFF:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vctz_v_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vctz_v_u32mf2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u32m1_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vctz.mask.nxv2i32.i64(<vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vctz_v_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vctz_v_u32m1_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u32m2_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vctz.mask.nxv4i32.i64(<vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vctz_v_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vctz_v_u32m2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u32m4_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vctz.mask.nxv8i32.i64(<vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vctz_v_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vctz_v_u32m4_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u32m8_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vctz.mask.nxv16i32.i64(<vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vctz_v_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vctz_v_u32m8_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u64m1_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vctz.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vctz_v_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) {
+  return __riscv_vctz_v_u64m1_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u64m2_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vctz.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vctz_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) {
+  return __riscv_vctz_v_u64m2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u64m4_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vctz.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vctz_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vctz_v_u64m4_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u64m8_mu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vctz.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]], i64 1)
+// CHECK-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vctz_v_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vctz_v_u64m8_mu(mask, maskedoff, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vghsh.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vghsh.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vghsh.c
@@ -0,0 +1,65 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vghsh_vv_u32mf2_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vghsh.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vghsh_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) {
+  return __riscv_vghsh_vv_u32mf2_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vghsh_vv_u32m1_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vghsh.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vghsh_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) {
+  return __riscv_vghsh_vv_u32m1_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vghsh_vv_u32m2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vghsh.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vghsh_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) {
+  return __riscv_vghsh_vv_u32m2_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vghsh_vv_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vghsh.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vghsh_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
+  return __riscv_vghsh_vv_u32m4_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vghsh_vv_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vghsh.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vghsh_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) {
+  return __riscv_vghsh_vv_u32m8_tu(vd, vs2, vs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vgmul.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vgmul.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vgmul.c
@@ -0,0 +1,65 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vgmul_vv_u32mf2_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vgmul.vv.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vgmul_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vgmul_vv_u32mf2_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vgmul_vv_u32m1_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vgmul.vv.nxv2i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vgmul_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vgmul_vv_u32m1_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vgmul_vv_u32m2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vgmul.vv.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vgmul_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vgmul_vv_u32m2_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vgmul_vv_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vgmul.vv.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vgmul_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vgmul_vv_u32m4_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vgmul_vv_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vgmul.vv.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vgmul_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vgmul_vv_u32m8_tu(vd, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vrev8.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vrev8.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vrev8.c
@@ -0,0 +1,895 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vrev8_v_u8mf8_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vrev8.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vrev8_v_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8mf8_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vrev8_v_u8mf4_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vrev8.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vrev8_v_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8mf4_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vrev8_v_u8mf2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vrev8.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vrev8_v_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8mf2_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vrev8_v_u8m1_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vrev8.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vrev8_v_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m1_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vrev8_v_u8m2_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vrev8.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF]], <vscale x 16 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vrev8_v_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m2_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vrev8_v_u8m4_tu
+// CHECK-RV64-SAME: (<vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vrev8.nxv32i8.i64(<vscale x 32 x i8> [[MASKEDOFF]], <vscale x 32 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vrev8_v_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m4_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vrev8_v_u8m8_tu
+// CHECK-RV64-SAME: (<vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vrev8.nxv64i8.i64(<vscale x 64 x i8> [[MASKEDOFF]], <vscale x 64 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vrev8_v_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m8_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vrev8_v_u16mf4_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vrev8.nxv1i16.i64(<vscale x 1 x i16> [[MASKEDOFF]], <vscale x 1 x i16> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vrev8_v_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16mf4_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vrev8_v_u16mf2_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vrev8.nxv2i16.i64(<vscale x 2 x i16> [[MASKEDOFF]], <vscale x 2 x i16> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vrev8_v_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16mf2_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vrev8_v_u16m1_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i16> [[MASKEDOFF:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vrev8.nxv4i16.i64(<vscale x 4 x i16> [[MASKEDOFF]], <vscale x 4 x i16> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vrev8_v_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m1_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vrev8_v_u16m2_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i16> [[MASKEDOFF:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vrev8.nxv8i16.i64(<vscale x 8 x i16> [[MASKEDOFF]], <vscale x 8 x i16> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vrev8_v_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m2_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vrev8_v_u16m4_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i16> [[MASKEDOFF:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vrev8.nxv16i16.i64(<vscale x 16 x i16> [[MASKEDOFF]], <vscale x 16 x i16> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vrev8_v_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m4_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vrev8_v_u16m8_tu
+// CHECK-RV64-SAME: (<vscale x 32 x i16> [[MASKEDOFF:%.*]], <vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vrev8.nxv32i16.i64(<vscale x 32 x i16> [[MASKEDOFF]], <vscale x 32 x i16> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vrev8_v_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m8_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vrev8_v_u32mf2_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[MASKEDOFF:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vrev8.nxv1i32.i64(<vscale x 1 x i32> [[MASKEDOFF]], <vscale x 1 x i32> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vrev8_v_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32mf2_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vrev8_v_u32m1_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vrev8.nxv2i32.i64(<vscale x 2 x i32> [[MASKEDOFF]], <vscale x 2 x i32> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vrev8_v_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m1_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vrev8_v_u32m2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vrev8.nxv4i32.i64(<vscale x 4 x i32> [[MASKEDOFF]], <vscale x 4 x i32> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vrev8_v_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m2_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vrev8_v_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vrev8.nxv8i32.i64(<vscale x 8 x i32> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vrev8_v_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m4_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vrev8_v_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vrev8.nxv16i32.i64(<vscale x 16 x i32> [[MASKEDOFF]], <vscale x 16 x i32> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vrev8_v_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m8_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vrev8_v_u64m1_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vrev8.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vrev8_v_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u64m1_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vrev8_v_u64m2_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vrev8.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vrev8_v_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u64m2_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vrev8_v_u64m4_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vrev8.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vrev8_v_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u64m4_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vrev8_v_u64m8_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vrev8.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vrev8_v_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u64m8_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vrev8_v_u8mf8_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vrev8.mask.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vrev8_v_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8mf8_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vrev8_v_u8mf4_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vrev8.mask.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vrev8_v_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8mf4_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vrev8_v_u8mf2_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vrev8.mask.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vrev8_v_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8mf2_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vrev8_v_u8m1_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vrev8.mask.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vrev8_v_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m1_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vrev8_v_u8m2_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vrev8.mask.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF]], <vscale x 16 x i8> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vrev8_v_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m2_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vrev8_v_u8m4_tum
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vrev8.mask.nxv32i8.i64(<vscale x 32 x i8> [[MASKEDOFF]], <vscale x 32 x i8> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vrev8_v_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m4_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vrev8_v_u8m8_tum
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vrev8.mask.nxv64i8.i64(<vscale x 64 x i8> [[MASKEDOFF]], <vscale x 64 x i8> [[VS2]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vrev8_v_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m8_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vrev8_v_u16mf4_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vrev8.mask.nxv1i16.i64(<vscale x 1 x i16> [[MASKEDOFF]], <vscale x 1 x i16> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vrev8_v_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16mf4_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vrev8_v_u16mf2_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vrev8.mask.nxv2i16.i64(<vscale x 2 x i16> [[MASKEDOFF]], <vscale x 2 x i16> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vrev8_v_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16mf2_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vrev8_v_u16m1_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[MASKEDOFF:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vrev8.mask.nxv4i16.i64(<vscale x 4 x i16> [[MASKEDOFF]], <vscale x 4 x i16> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vrev8_v_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m1_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vrev8_v_u16m2_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[MASKEDOFF:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vrev8.mask.nxv8i16.i64(<vscale x 8 x i16> [[MASKEDOFF]], <vscale x 8 x i16> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vrev8_v_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m2_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vrev8_v_u16m4_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[MASKEDOFF:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vrev8.mask.nxv16i16.i64(<vscale x 16 x i16> [[MASKEDOFF]], <vscale x 16 x i16> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vrev8_v_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m4_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vrev8_v_u16m8_tum
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[MASKEDOFF:%.*]], <vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vrev8.mask.nxv32i16.i64(<vscale x 32 x i16> [[MASKEDOFF]], <vscale x 32 x i16> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vrev8_v_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m8_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vrev8_v_u32mf2_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[MASKEDOFF:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vrev8.mask.nxv1i32.i64(<vscale x 1 x i32> [[MASKEDOFF]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vrev8_v_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32mf2_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vrev8_v_u32m1_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vrev8.mask.nxv2i32.i64(<vscale x 2 x i32> [[MASKEDOFF]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vrev8_v_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m1_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vrev8_v_u32m2_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vrev8.mask.nxv4i32.i64(<vscale x 4 x i32> [[MASKEDOFF]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vrev8_v_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m2_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vrev8_v_u32m4_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vrev8.mask.nxv8i32.i64(<vscale x 8 x i32> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vrev8_v_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m4_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vrev8_v_u32m8_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vrev8.mask.nxv16i32.i64(<vscale x 16 x i32> [[MASKEDOFF]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vrev8_v_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m8_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vrev8_v_u64m1_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vrev8.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vrev8_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u64m1_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vrev8_v_u64m2_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vrev8.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vrev8_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u64m2_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vrev8_v_u64m4_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vrev8.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vrev8_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u64m4_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vrev8_v_u64m8_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vrev8.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vrev8_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u64m8_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vrev8_v_u8mf8_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vrev8.mask.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vrev8_v_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8mf8_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vrev8_v_u8mf4_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vrev8.mask.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vrev8_v_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8mf4_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vrev8_v_u8mf2_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vrev8.mask.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vrev8_v_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8mf2_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vrev8_v_u8m1_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vrev8.mask.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vrev8_v_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m1_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vrev8_v_u8m2_tumu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vrev8.mask.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF]], <vscale x 16 x i8> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vrev8_v_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m2_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vrev8_v_u8m4_tumu
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vrev8.mask.nxv32i8.i64(<vscale x 32 x i8> [[MASKEDOFF]], <vscale x 32 x i8> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vrev8_v_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m4_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vrev8_v_u8m8_tumu
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vrev8.mask.nxv64i8.i64(<vscale x 64 x i8> [[MASKEDOFF]], <vscale x 64 x i8> [[VS2]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vrev8_v_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m8_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vrev8_v_u16mf4_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vrev8.mask.nxv1i16.i64(<vscale x 1 x i16> [[MASKEDOFF]], <vscale x 1 x i16> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vrev8_v_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16mf4_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vrev8_v_u16mf2_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vrev8.mask.nxv2i16.i64(<vscale x 2 x i16> [[MASKEDOFF]], <vscale x 2 x i16> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vrev8_v_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16mf2_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vrev8_v_u16m1_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[MASKEDOFF:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vrev8.mask.nxv4i16.i64(<vscale x 4 x i16> [[MASKEDOFF]], <vscale x 4 x i16> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vrev8_v_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m1_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vrev8_v_u16m2_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[MASKEDOFF:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vrev8.mask.nxv8i16.i64(<vscale x 8 x i16> [[MASKEDOFF]], <vscale x 8 x i16> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vrev8_v_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m2_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vrev8_v_u16m4_tumu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[MASKEDOFF:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vrev8.mask.nxv16i16.i64(<vscale x 16 x i16> [[MASKEDOFF]], <vscale x 16 x i16> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vrev8_v_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m4_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vrev8_v_u16m8_tumu
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[MASKEDOFF:%.*]], <vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vrev8.mask.nxv32i16.i64(<vscale x 32 x i16> [[MASKEDOFF]], <vscale x 32 x i16> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vrev8_v_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m8_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vrev8_v_u32mf2_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[MASKEDOFF:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vrev8.mask.nxv1i32.i64(<vscale x 1 x i32> [[MASKEDOFF]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vrev8_v_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32mf2_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vrev8_v_u32m1_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vrev8.mask.nxv2i32.i64(<vscale x 2 x i32> [[MASKEDOFF]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vrev8_v_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m1_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vrev8_v_u32m2_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vrev8.mask.nxv4i32.i64(<vscale x 4 x i32> [[MASKEDOFF]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vrev8_v_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m2_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vrev8_v_u32m4_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vrev8.mask.nxv8i32.i64(<vscale x 8 x i32> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vrev8_v_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m4_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vrev8_v_u32m8_tumu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vrev8.mask.nxv16i32.i64(<vscale x 16 x i32> [[MASKEDOFF]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vrev8_v_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m8_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vrev8_v_u64m1_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vrev8.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vrev8_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u64m1_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vrev8_v_u64m2_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vrev8.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vrev8_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u64m2_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vrev8_v_u64m4_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vrev8.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vrev8_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u64m4_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vrev8_v_u64m8_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vrev8.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vrev8_v_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u64m8_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vrev8_v_u8mf8_mu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vrev8.mask.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vrev8_v_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8mf8_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vrev8_v_u8mf4_mu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vrev8.mask.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vrev8_v_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8mf4_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vrev8_v_u8mf2_mu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vrev8.mask.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vrev8_v_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8mf2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vrev8_v_u8m1_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vrev8.mask.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vrev8_v_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m1_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vrev8_v_u8m2_mu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vrev8.mask.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF]], <vscale x 16 x i8> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vrev8_v_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vrev8_v_u8m4_mu
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vrev8.mask.nxv32i8.i64(<vscale x 32 x i8> [[MASKEDOFF]], <vscale x 32 x i8> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vrev8_v_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m4_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vrev8_v_u8m8_mu
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vrev8.mask.nxv64i8.i64(<vscale x 64 x i8> [[MASKEDOFF]], <vscale x 64 x i8> [[VS2]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vrev8_v_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u8m8_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vrev8_v_u16mf4_mu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vrev8.mask.nxv1i16.i64(<vscale x 1 x i16> [[MASKEDOFF]], <vscale x 1 x i16> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vrev8_v_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16mf4_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vrev8_v_u16mf2_mu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vrev8.mask.nxv2i16.i64(<vscale x 2 x i16> [[MASKEDOFF]], <vscale x 2 x i16> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vrev8_v_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16mf2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vrev8_v_u16m1_mu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[MASKEDOFF:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vrev8.mask.nxv4i16.i64(<vscale x 4 x i16> [[MASKEDOFF]], <vscale x 4 x i16> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vrev8_v_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m1_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vrev8_v_u16m2_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[MASKEDOFF:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vrev8.mask.nxv8i16.i64(<vscale x 8 x i16> [[MASKEDOFF]], <vscale x 8 x i16> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vrev8_v_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vrev8_v_u16m4_mu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[MASKEDOFF:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vrev8.mask.nxv16i16.i64(<vscale x 16 x i16> [[MASKEDOFF]], <vscale x 16 x i16> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vrev8_v_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m4_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vrev8_v_u16m8_mu
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[MASKEDOFF:%.*]], <vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vrev8.mask.nxv32i16.i64(<vscale x 32 x i16> [[MASKEDOFF]], <vscale x 32 x i16> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vrev8_v_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u16m8_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vrev8_v_u32mf2_mu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[MASKEDOFF:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vrev8.mask.nxv1i32.i64(<vscale x 1 x i32> [[MASKEDOFF]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vrev8_v_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32mf2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vrev8_v_u32m1_mu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vrev8.mask.nxv2i32.i64(<vscale x 2 x i32> [[MASKEDOFF]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vrev8_v_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m1_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vrev8_v_u32m2_mu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vrev8.mask.nxv4i32.i64(<vscale x 4 x i32> [[MASKEDOFF]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vrev8_v_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m2_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vrev8_v_u32m4_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vrev8.mask.nxv8i32.i64(<vscale x 8 x i32> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vrev8_v_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m4_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vrev8_v_u32m8_mu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vrev8.mask.nxv16i32.i64(<vscale x 16 x i32> [[MASKEDOFF]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vrev8_v_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vrev8_v_u32m8_mu(mask, maskedoff, vs2, vl);
maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrev8_v_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vrev8_v_u64m1_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrev8_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vrev8_v_u64m2_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrev8_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vrev8_v_u64m4_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrev8_v_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vrev8_v_u64m8_mu(mask, maskedoff, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vrol.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vrol.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vrol.c @@ -0,0 +1,1775 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t 
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vrol_vv_u8mf8_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vrol.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vrol_vv_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) {
+  return __riscv_vrol_vv_u8mf8_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vrol_vx_u8mf8_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vrol.nxv1i8.i64.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vrol_vx_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vrol_vx_u8mf8_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vrol_vv_u8mf4_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vrol.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], <vscale x 2 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vrol_vv_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) {
+  return __riscv_vrol_vv_u8mf4_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vrol_vx_u8mf4_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vrol.nxv2i8.i64.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vrol_vx_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vrol_vx_u8mf4_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vrol_vv_u8mf2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], <vscale x 4 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vrol.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], <vscale x 4 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vrol_vv_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) {
+  return __riscv_vrol_vv_u8mf2_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vrol_vx_u8mf2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vrol.nxv4i8.i64.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vrol_vx_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vrol_vx_u8mf2_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vrol_vv_u8m1_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], <vscale x 8 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vrol.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], <vscale x 8 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vrol_vv_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) {
+  return __riscv_vrol_vv_u8m1_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vrol_vx_u8m1_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:
[[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vx_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m1_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vv_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vx_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vv_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vx_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vv_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vx_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], 
[[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vv_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vrol_vv_u16mf4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vx_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16mf4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vv_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u16mf2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vx_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16mf2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vv_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vx_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m1_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vv_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, 
size_t vl) { + return __riscv_vrol_vv_u16m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vx_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vv_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vx_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vv_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vx_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u32mf2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vrol.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vx_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32mf2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vx_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m1_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vv_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vx_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vv_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vx_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m8_tu +// 
CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vv_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vx_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m1_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, 
vuint64m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vv_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vx_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vv_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf4_tum +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vx_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vv_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vx_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vv_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m1_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vx_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m1_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vv_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vx_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vv_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vx_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vv_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vx_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vv_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vrol_vv_u16mf4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef 
[[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vx_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16mf4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vv_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u16mf2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vx_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16mf2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vv_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m1_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vx_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m1_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vv_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], 
i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vx_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vv_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vx_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vv_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vx_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vv_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u32mf2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vx_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32mf2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vv_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m1_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m1_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vv_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vx_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vv_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vx_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vv_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vx_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m1_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m1_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vv_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vx_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vv_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vx_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vv_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vx_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vv_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m1_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vx_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m1_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vv_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vx_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vv_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vx_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vv_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef 
[[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vx_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vv_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vrol_vv_u16mf4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vx_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16mf4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vv_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u16mf2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vx_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16mf2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vv_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m1_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vx_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m1_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vv_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vx_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vv_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vx_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vv_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m8_tumu +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vx_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vv_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u32mf2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vx_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32mf2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m1_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m1_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vv_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vrol_vx_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vx_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vv_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vx_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vv_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vx_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m1_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: 
define dso_local @test_vrol_vx_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m1_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vv_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vx_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf8_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vv_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vx_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vv_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u8mf2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define 
dso_local @test_vrol_vx_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vx_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8mf2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vv_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vx_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vv_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vx_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vv_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m4_mu +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vx_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vv_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u8m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vx_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u8m8_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vv_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vrol_vv_u16mf4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vx_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16mf4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vv_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u16mf2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vx_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16mf2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vv_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vx_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vv_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vx_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vv_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vx_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vv_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u16m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vx_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u16m8_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vv_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vrol_vv_u32mf2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vx_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32mf2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vv_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vv_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vx_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vv_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vx_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vv_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vrol_vv_u32m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef 
[[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vx_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u32m8_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_vx_u64m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vrol_vv_u64m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vrol.mask.nxv4i64.i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vrol_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vrol_vx_u64m4_mu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vrol_vv_u64m8_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vrol.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vrol_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vrol_vv_u64m8_mu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vrol_vx_u64m8_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vrol.mask.nxv8i64.i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vrol_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vrol_vx_u64m8_mu(mask, maskedoff, vs2, rs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vror.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vror.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vror.c
@@ -0,0 +1,1775 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN: -target-feature +experimental-zvbb \
+// RUN: -target-feature +experimental-zvbc \
+// RUN: -target-feature +experimental-zvkg \
+// RUN: -target-feature +experimental-zvkned \
+// RUN: -target-feature +experimental-zvknhb \
+// RUN: -target-feature +experimental-zvksed \
+// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN: FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vv_u8mf8_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vror_vv_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8mf8_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vx_u8mf8_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.nxv1i8.i64.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t
test_vror_vx_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8mf8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vror_vv_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vror_vv_u8mf4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vror_vx_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8mf4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vv_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vror_vv_u8mf2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vx_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8mf2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vv_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vror_vv_u8m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vx_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m1_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vror.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vv_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vror_vv_u8m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vx_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vv_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vror_vv_u8m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vx_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vv_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vror_vv_u8m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vx_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vv_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vror_vv_u16mf4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], 
[[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vx_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16mf4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vv_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vror_vv_u16mf2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vx_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16mf2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vv_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vror_vv_u16m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vx_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m1_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vv_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vror_vv_u16m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vx_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, 
size_t vl) { + return __riscv_vror_vx_u16m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vv_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vror_vv_u16m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vx_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vv_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vror_vv_u16m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vx_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vror_vv_u32mf2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vx_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32mf2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i32.nxv2i32.i64( 
[[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vror_vv_u32m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vx_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m1_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vv_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vror_vv_u32m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vx_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vv_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vror_vv_u32m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vx_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vv_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vror_vv_u32m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], 
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vror.nxv16i32.i64.i64(<vscale x 16 x i32> [[MASKEDOFF]], <vscale x 16 x i32> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vror_vx_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u32m8_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vror_vv_u64m1_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vror.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vror_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vror_vv_u64m1_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vror_vx_u64m1_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vror.nxv1i64.i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vror_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u64m1_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vror_vv_u64m2_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vror.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vror_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u64m2_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vror_vx_u64m2_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vror.nxv2i64.i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vror_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u64m2_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vror_vv_u64m4_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vror.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vror_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u64m4_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vror_vx_u64m4_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vror.nxv4i64.i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vror_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u64m4_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vror_vv_u64m8_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vror.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vror_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vror_vv_u64m8_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vror_vx_u64m8_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vror.nxv8i64.i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vror_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u64m8_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vv_u8mf8_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vror_vv_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8mf8_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vx_u8mf8_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.mask.nxv1i8.i64.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vror_vx_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8mf8_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vror_vv_u8mf4_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vror.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], <vscale x 2 x i8> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vror_vv_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8mf4_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vror_vx_u8mf4_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vror.mask.nxv2i8.i64.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vror_vx_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8mf4_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vror_vv_u8mf2_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], <vscale x 4 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vror.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], <vscale x 4 x i8> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vror_vv_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8mf2_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vror_vx_u8mf2_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vror.mask.nxv4i8.i64.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vror_vx_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8mf2_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vror_vv_u8m1_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], <vscale x 8 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vror.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], <vscale x 8 x i8> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vror_vv_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8m1_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vror_vx_u8m1_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vror.mask.nxv8i8.i64.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vror_vx_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8m1_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vror_vv_u8m2_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], <vscale x 16 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vror.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF]], <vscale x 16 x i8> [[VS2]], <vscale x 16 x i8> [[VS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vror_vv_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8m2_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vror_vx_u8m2_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vror.mask.nxv16i8.i64.i64(<vscale x 16 x i8> [[MASKEDOFF]], <vscale x 16 x i8> [[VS2]], i64 [[RS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vror_vx_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8m2_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vror_vv_u8m4_tum
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], <vscale x 32 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vror.mask.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[MASKEDOFF]], <vscale x 32 x i8> [[VS2]], <vscale x 32 x i8> [[VS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vror_vv_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8m4_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vror_vx_u8m4_tum
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vror.mask.nxv32i8.i64.i64(<vscale x 32 x i8> [[MASKEDOFF]], <vscale x 32 x i8> [[VS2]], i64 [[RS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vror_vx_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8m4_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vror_vv_u8m8_tum
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], <vscale x 64 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vror.mask.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[MASKEDOFF]], <vscale x 64 x i8> [[VS2]], <vscale x 64 x i8> [[VS1]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vror_vv_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8m8_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vror_vx_u8m8_tum
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vror.mask.nxv64i8.i64.i64(<vscale x 64 x i8> [[MASKEDOFF]], <vscale x 64 x i8> [[VS2]], i64 [[RS1]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vror_vx_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8m8_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vror_vv_u16mf4_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], <vscale x 1 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vror.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[MASKEDOFF]], <vscale x 1 x i16> [[VS2]], <vscale x 1 x i16> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vror_vv_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u16mf4_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vror_vx_u16mf4_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vror.mask.nxv1i16.i64.i64(<vscale x 1 x i16> [[MASKEDOFF]], <vscale x 1 x i16> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vror_vx_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u16mf4_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vror_vv_u16mf2_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i16> [[VS2:%.*]], <vscale x 2 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vror.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[MASKEDOFF]], <vscale x 2 x i16> [[VS2]], <vscale x 2 x i16> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vror_vv_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u16mf2_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vror_vx_u16mf2_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vror.mask.nxv2i16.i64.i64(<vscale x 2 x i16> [[MASKEDOFF]], <vscale x 2 x i16> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vror_vx_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u16mf2_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vror_vv_u16m1_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[MASKEDOFF:%.*]], <vscale x 4 x i16> [[VS2:%.*]], <vscale x 4 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vror.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[MASKEDOFF]], <vscale x 4 x i16> [[VS2]], <vscale x 4 x i16> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vror_vv_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) {
+  return __riscv_vror_vv_u16m1_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vror_vx_u16m1_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[MASKEDOFF:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vror.mask.nxv4i16.i64.i64(<vscale x 4 x i16> [[MASKEDOFF]], <vscale x 4 x i16> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vror_vx_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u16m1_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vror_vv_u16m2_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[MASKEDOFF:%.*]], <vscale x 8 x i16> [[VS2:%.*]], <vscale x 8 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vror.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[MASKEDOFF]], <vscale x 8 x i16> [[VS2]], <vscale x 8 x i16> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vror_vv_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u16m2_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vror_vx_u16m2_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[MASKEDOFF:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vror.mask.nxv8i16.i64.i64(<vscale x 8 x i16> [[MASKEDOFF]], <vscale x 8 x i16> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vror_vx_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u16m2_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vror_vv_u16m4_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[MASKEDOFF:%.*]], <vscale x 16 x i16> [[VS2:%.*]], <vscale x 16 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vror.mask.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[MASKEDOFF]], <vscale x 16 x i16> [[VS2]], <vscale x 16 x i16> [[VS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vror_vv_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u16m4_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vror_vx_u16m4_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[MASKEDOFF:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vror.mask.nxv16i16.i64.i64(<vscale x 16 x i16> [[MASKEDOFF]], <vscale x 16 x i16> [[VS2]], i64 [[RS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vror_vx_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u16m4_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vror_vv_u16m8_tum
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[MASKEDOFF:%.*]], <vscale x 32 x i16> [[VS2:%.*]], <vscale x 32 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vror.mask.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[MASKEDOFF]], <vscale x 32 x i16> [[VS2]], <vscale x 32 x i16> [[VS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vror_vv_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) {
+  return __riscv_vror_vv_u16m8_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vror_vx_u16m8_tum
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[MASKEDOFF:%.*]], <vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vror.mask.nxv32i16.i64.i64(<vscale x 32 x i16> [[MASKEDOFF]], <vscale x 32 x i16> [[VS2]], i64 [[RS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vror_vx_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u16m8_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vror_vv_u32mf2_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[MASKEDOFF:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vror.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[MASKEDOFF]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vror_vv_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u32mf2_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vror_vx_u32mf2_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[MASKEDOFF:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vror.mask.nxv1i32.i64.i64(<vscale x 1 x i32> [[MASKEDOFF]], <vscale x 1 x i32> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vror_vx_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u32mf2_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vror_vv_u32m1_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vror.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[MASKEDOFF]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vror_vv_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) {
+  return __riscv_vror_vv_u32m1_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vror_vx_u32m1_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vror.mask.nxv2i32.i64.i64(<vscale x 2 x i32> [[MASKEDOFF]], <vscale x 2 x i32> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vror_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u32m1_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vror_vv_u32m2_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vror.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[MASKEDOFF]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vror_vv_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u32m2_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vror_vx_u32m2_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vror.mask.nxv4i32.i64.i64(<vscale x 4 x i32> [[MASKEDOFF]], <vscale x 4 x i32> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vror_vx_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u32m2_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vror_vv_u32m4_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vror.mask.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vror_vv_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u32m4_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vror_vx_u32m4_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vror.mask.nxv8i32.i64.i64(<vscale x 8 x i32> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vror_vx_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u32m4_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vror_vv_u32m8_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vror.mask.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[MASKEDOFF]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vror_vv_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) {
+  return __riscv_vror_vv_u32m8_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vror_vx_u32m8_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vror.mask.nxv16i32.i64.i64(<vscale x 16 x i32> [[MASKEDOFF]], <vscale x 16 x i32> [[VS2]], i64 [[RS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vror_vx_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u32m8_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vror_vv_u64m1_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vror.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vror_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vror_vv_u64m1_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vror_vx_u64m1_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vror.mask.nxv1i64.i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vror_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u64m1_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vror_vv_u64m2_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vror.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vror_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u64m2_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vror_vx_u64m2_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vror.mask.nxv2i64.i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vror_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u64m2_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vror_vv_u64m4_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vror.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vror_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u64m4_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vror_vx_u64m4_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vror.mask.nxv4i64.i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vror_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u64m4_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vror_vv_u64m8_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vror.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vror_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vror_vv_u64m8_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vror_vx_u64m8_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vror.mask.nxv8i64.i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vror_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u64m8_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vv_u8mf8_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vror_vv_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8mf8_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vx_u8mf8_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.mask.nxv1i8.i64.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vror_vx_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8mf8_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vror_vv_u8mf4_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vror.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], <vscale x 2 x i8> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vror_vv_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8mf4_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vror_vx_u8mf4_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vror.mask.nxv2i8.i64.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vror_vx_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8mf4_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vror_vv_u8mf2_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], <vscale x 4 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vror.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], <vscale x 4 x i8> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vror_vv_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8mf2_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vror_vx_u8mf2_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vror.mask.nxv4i8.i64.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vror_vx_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8mf2_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vror_vv_u8m1_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], <vscale x 8 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vror.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], <vscale x 8 x i8> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vror_vv_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8m1_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vror_vx_u8m1_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vror.mask.nxv8i8.i64.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vror_vx_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8m1_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vror_vv_u8m2_tumu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], <vscale x 16 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vror.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF]], <vscale x 16 x i8> [[VS2]], <vscale x 16 x i8> [[VS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vror_vv_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8m2_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vror_vx_u8m2_tumu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vror.mask.nxv16i8.i64.i64(<vscale x 16 x i8> [[MASKEDOFF]], <vscale x 16 x i8> [[VS2]], i64 [[RS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vror_vx_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8m2_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vror_vv_u8m4_tumu
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], <vscale x 32 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vror.mask.nxv32i8.nxv32i8.i64(<vscale x 32 x i8> [[MASKEDOFF]], <vscale x 32 x i8> [[VS2]], <vscale x 32 x i8> [[VS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vror_vv_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8m4_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vror_vx_u8m4_tumu
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vror.mask.nxv32i8.i64.i64(<vscale x 32 x i8> [[MASKEDOFF]], <vscale x 32 x i8> [[VS2]], i64 [[RS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vror_vx_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8m4_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vror_vv_u8m8_tumu
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], <vscale x 64 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vror.mask.nxv64i8.nxv64i8.i64(<vscale x 64 x i8> [[MASKEDOFF]], <vscale x 64 x i8> [[VS2]], <vscale x 64 x i8> [[VS1]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vror_vv_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8m8_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vror_vx_u8m8_tumu
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vror.mask.nxv64i8.i64.i64(<vscale x 64 x i8> [[MASKEDOFF]], <vscale x 64 x i8> [[VS2]], i64 [[RS1]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vror_vx_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8m8_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vror_vv_u16mf4_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], <vscale x 1 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vror.mask.nxv1i16.nxv1i16.i64(<vscale x 1 x i16> [[MASKEDOFF]], <vscale x 1 x i16> [[VS2]], <vscale x 1 x i16> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vror_vv_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u16mf4_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vror_vx_u16mf4_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vror.mask.nxv1i16.i64.i64(<vscale x 1 x i16> [[MASKEDOFF]], <vscale x 1 x i16> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vror_vx_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u16mf4_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vror_vv_u16mf2_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i16> [[VS2:%.*]], <vscale x 2 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vror.mask.nxv2i16.nxv2i16.i64(<vscale x 2 x i16> [[MASKEDOFF]], <vscale x 2 x i16> [[VS2]], <vscale x 2 x i16> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vror_vv_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u16mf2_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vror_vx_u16mf2_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vror.mask.nxv2i16.i64.i64(<vscale x 2 x i16> [[MASKEDOFF]], <vscale x 2 x i16> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vror_vx_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u16mf2_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vror_vv_u16m1_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[MASKEDOFF:%.*]], <vscale x 4 x i16> [[VS2:%.*]], <vscale x 4 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vror.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[MASKEDOFF]], <vscale x 4 x i16> [[VS2]], <vscale x 4 x i16> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vror_vv_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) {
+  return __riscv_vror_vv_u16m1_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vror_vx_u16m1_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[MASKEDOFF:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vror.mask.nxv4i16.i64.i64(<vscale x 4 x i16> [[MASKEDOFF]], <vscale x 4 x i16> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vror_vx_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u16m1_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vror_vv_u16m2_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[MASKEDOFF:%.*]], <vscale x 8 x i16> [[VS2:%.*]], <vscale x 8 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vror.mask.nxv8i16.nxv8i16.i64(<vscale x 8 x i16> [[MASKEDOFF]], <vscale x 8 x i16> [[VS2]], <vscale x 8 x i16> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vror_vv_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u16m2_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vror_vx_u16m2_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[MASKEDOFF:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vror.mask.nxv8i16.i64.i64(<vscale x 8 x i16> [[MASKEDOFF]], <vscale x 8 x i16> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vror_vx_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u16m2_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vror_vv_u16m4_tumu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[MASKEDOFF:%.*]], <vscale x 16 x i16> [[VS2:%.*]], <vscale x 16 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vror.mask.nxv16i16.nxv16i16.i64(<vscale x 16 x i16> [[MASKEDOFF]], <vscale x 16 x i16> [[VS2]], <vscale x 16 x i16> [[VS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vror_vv_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u16m4_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vror_vx_u16m4_tumu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[MASKEDOFF:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vror.mask.nxv16i16.i64.i64(<vscale x 16 x i16> [[MASKEDOFF]], <vscale x 16 x i16> [[VS2]], i64 [[RS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vror_vx_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u16m4_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vror_vv_u16m8_tumu
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[MASKEDOFF:%.*]], <vscale x 32 x i16> [[VS2:%.*]], <vscale x 32 x i16> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vror.mask.nxv32i16.nxv32i16.i64(<vscale x 32 x i16> [[MASKEDOFF]], <vscale x 32 x i16> [[VS2]], <vscale x 32 x i16> [[VS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vror_vv_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) {
+  return __riscv_vror_vv_u16m8_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vror_vx_u16m8_tumu
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[MASKEDOFF:%.*]], <vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vror.mask.nxv32i16.i64.i64(<vscale x 32 x i16> [[MASKEDOFF]], <vscale x 32 x i16> [[VS2]], i64 [[RS1]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vror_vx_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u16m8_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vror_vv_u32mf2_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[MASKEDOFF:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vror.mask.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[MASKEDOFF]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vror_vv_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u32mf2_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vror_vx_u32mf2_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[MASKEDOFF:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vror.mask.nxv1i32.i64.i64(<vscale x 1 x i32> [[MASKEDOFF]], <vscale x 1 x i32> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vror_vx_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u32mf2_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vror_vv_u32m1_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vror.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[MASKEDOFF]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vror_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) {
+  return __riscv_vror_vv_u32m1_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vror_vx_u32m1_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vror.mask.nxv2i32.i64.i64(<vscale x 2 x i32> [[MASKEDOFF]], <vscale x 2 x i32> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vror_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u32m1_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vror_vv_u32m2_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vror.mask.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[MASKEDOFF]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vror_vv_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u32m2_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vror_vx_u32m2_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vror.mask.nxv4i32.i64.i64(<vscale x 4 x i32> [[MASKEDOFF]], <vscale x 4 x i32> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vror_vx_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u32m2_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vror_vv_u32m4_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vror.mask.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vror_vv_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u32m4_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vror_vx_u32m4_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vror.mask.nxv8i32.i64.i64(<vscale x 8 x i32> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vror_vx_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u32m4_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vror_vv_u32m8_tumu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vror.mask.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[MASKEDOFF]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vror_vv_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) {
+  return __riscv_vror_vv_u32m8_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vror_vx_u32m8_tumu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vror.mask.nxv16i32.i64.i64(<vscale x 16 x i32> [[MASKEDOFF]], <vscale x 16 x i32> [[VS2]], i64 [[RS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vror_vx_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u32m8_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vror_vv_u64m1_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vror.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vror_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vror_vv_u64m1_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vror_vx_u64m1_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vror.mask.nxv1i64.i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vror_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u64m1_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vror_vv_u64m2_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vror.mask.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vror_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u64m2_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vror_vx_u64m2_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vror.mask.nxv2i64.i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vror_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u64m2_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vror_vv_u64m4_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vror.mask.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vror_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u64m4_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vror_vx_u64m4_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vror.mask.nxv4i64.i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vror_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u64m4_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vror_vv_u64m8_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vror.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vror_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vror_vv_u64m8_tumu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vror_vx_u64m8_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vror.mask.nxv8i64.i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vror_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u64m8_tumu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vv_u8mf8_mu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.mask.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vror_vv_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8mf8_mu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vx_u8mf8_mu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.mask.nxv1i8.i64.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], i64 [[RS1]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vror_vx_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8mf8_mu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vror_vv_u8mf4_mu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vror.mask.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], <vscale x 2 x i8> [[VS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vror_vv_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8mf4_mu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vror_vx_u8mf4_mu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vror.mask.nxv2i8.i64.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], i64 [[RS1]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vror_vx_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8mf4_mu(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vror_vv_u8mf2_mu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], <vscale x 4 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vror.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], <vscale x 4 x i8> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vror_vv_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) {
+  return __riscv_vror_vv_u8mf2_mu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vror_vx_u8mf2_mu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vror.mask.nxv4i8.i64.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vror_vx_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_vx_u8mf2_mu(mask, maskedoff, vs2, rs1, vl);
+}
+
CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vv_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vror_vv_u8m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vx_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vv_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vror_vv_u8m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vx_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vv_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vror_vv_u8m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vx_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m8_mu +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vv_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vror_vv_u8m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vx_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u8m8_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vv_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vror_vv_u16mf4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vx_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16mf4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vv_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vror_vv_u16mf2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vx_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16mf2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m1_mu +// 
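Note how the mask type in these tests tracks the ratio SEW/LMUL rather than the element type: `u8m8` pairs with `vbool1_t` (8/8 = 1) and `u16mf4` pairs with `vbool64_t` (16 divided by 1/4 = 64), so one mask bit covers each SEW/LMUL-bit slice of a vector register. A hypothetical sketch of building a matching all-ones mask (assumes the base V `vmset` intrinsic):

#include <riscv_vector.h>

// For u16mf4 the matching mask is vbool64_t, because
// SEW/LMUL = 16 / (1/4) = 64.
vuint16mf4_t rotate_all(vuint16mf4_t maskedoff, vuint16mf4_t vs2,
                        vuint16mf4_t vs1, size_t vl) {
  vbool64_t all = __riscv_vmset_m_b64(vl);
  return __riscv_vror_vv_u16mf4_mu(all, maskedoff, vs2, vs1, vl);
}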
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vv_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vror_vv_u16m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vx_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vv_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vror_vv_u16m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vx_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vv_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vror_vv_u16m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vx_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m8_mu +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vv_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vror_vv_u16m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vx_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u16m8_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vv_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vror_vv_u32mf2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vx_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32mf2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vv_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vror_vv_u32m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m2_mu +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vv_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vror_vv_u32m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vx_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vv_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vror_vv_u32m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vx_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vv_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vror_vv_u32m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vx_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u32m8_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vror_vv_u64m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u64m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vror_vv_u64m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u64m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vror_vv_u64m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u64m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
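In the `_vx` form the rotate amount is a single `size_t` scalar shared by every element, and per the Zvbb rotate semantics only the low log2(SEW) bits of the amount should be significant, so for SEW=64 an amount of 65 behaves like 1. An illustrative one-liner:

#include <riscv_vector.h>

// Rotate each active u64 element right by one bit; for SEW=64 only
// the low 6 bits of the scalar amount are expected to matter.
vuint64m4_t rotate_by_one(vbool16_t mask, vuint64m4_t maskedoff,
                          vuint64m4_t vs2, size_t vl) {
  return __riscv_vror_vx_u64m4_mu(mask, maskedoff, vs2, 1, vl);
}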
[[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vror_vv_u64m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_vx_u64m8_mu(mask, maskedoff, vs2, rs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsha2ch.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsha2ch.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsha2ch.c @@ -0,0 +1,105 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsha2ch_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u32mf2_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsha2ch_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u32m1_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsha2ch_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, 
vuint32m2_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u32m2_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsha2ch_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u32m4_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsha2ch_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u32m8_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u64m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv1i64.nxv1i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vsha2ch_vv_u64m1_tu(vuint64m1_t vd, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u64m1_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u64m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv2i64.nxv2i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vsha2ch_vv_u64m2_tu(vuint64m2_t vd, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u64m2_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u64m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv4i64.nxv4i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vsha2ch_vv_u64m4_tu(vuint64m4_t vd, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u64m4_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv8i64.nxv8i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vsha2ch_vv_u64m8_tu(vuint64m8_t vd, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vsha2ch_vv_u64m8_tu(vd, vs2, vs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsha2cl.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsha2cl.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsha2cl.c @@ -0,0 +1,105 @@ +// NOTE: Assertions 
have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsha2cl_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u32mf2_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsha2cl_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u32m1_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsha2cl_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u32m2_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsha2cl_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u32m4_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsha2cl_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u32m8_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u64m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv1i64.nxv1i64.i64( [[VD]], 
[[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vsha2cl_vv_u64m1_tu(vuint64m1_t vd, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u64m1_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u64m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv2i64.nxv2i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vsha2cl_vv_u64m2_tu(vuint64m2_t vd, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u64m2_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u64m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv4i64.nxv4i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vsha2cl_vv_u64m4_tu(vuint64m4_t vd, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u64m4_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2cl_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2cl.nxv8i64.nxv8i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vsha2cl_vv_u64m8_tu(vuint64m8_t vd, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vsha2cl_vv_u64m8_tu(vd, vs2, vs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsha2ms.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsha2ms.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsha2ms.c @@ -0,0 +1,105 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsha2ms_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u32mf2_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsha2ms_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u32m1_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsha2ms_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u32m2_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsha2ms_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u32m4_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsha2ms_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u32m8_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u64m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv1i64.nxv1i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vsha2ms_vv_u64m1_tu(vuint64m1_t vd, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u64m1_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u64m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv2i64.nxv2i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vsha2ms_vv_u64m2_tu(vuint64m2_t vd, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u64m2_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u64m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv4i64.nxv4i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vsha2ms_vv_u64m4_tu(vuint64m4_t vd, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u64m4_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], 
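The three SHA-2 intrinsics are destructive: `vd` is both a source and the destination, which is why only `_tu` variants appear and the lowered call carries the policy immediate `i64 2` (tail undisturbed, mask agnostic) with no mask operand. A sketch of the resulting calling pattern, with hypothetical parameter names and no claim about the exact element-group roles:

#include <riscv_vector.h>

// The destructive operand threads the hash state through each call;
// this shows the chaining shape only, not a complete SHA-256 round.
vuint32m1_t sha2_step(vuint32m1_t state, vuint32m1_t other_half,
                      vuint32m1_t round_input, size_t vl) {
  state = __riscv_vsha2cl_vv_u32m1_tu(state, other_half, round_input, vl);
  state = __riscv_vsha2ch_vv_u32m1_tu(state, other_half, round_input, vl);
  return state;
}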
[[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv8i64.nxv8i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vsha2ms_vv_u64m8_tu(vuint64m8_t vd, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vsha2ms_vv_u64m8_tu(vd, vs2, vs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsm3c.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsm3c.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsm3c.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsm3c_vi_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3c.nxv1i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm3c_vi_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm3c_vi_u32mf2_tu(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3c_vi_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3c.nxv2i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm3c_vi_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm3c_vi_u32m1_tu(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3c_vi_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3c.nxv4i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm3c_vi_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm3c_vi_u32m2_tu(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3c_vi_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3c.nxv8i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm3c_vi_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm3c_vi_u32m4_tu(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3c_vi_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vsm3c.nxv16i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm3c_vi_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vsm3c_vi_u32m8_tu(vd, vs2, 0, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsm3me.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsm3me.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsm3me.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm3me_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vsm3me_vv_u32mf2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm3me_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vsm3me_vv_u32m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm3me_vv_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vsm3me_vv_u32m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm3me_vv_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vsm3me_vv_u32m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], 
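Contrast the two SM3 intrinsics: vsm3c is destructive in `vd`, takes a compile-time round-group immediate (pinned to 0 throughout these tests), and lowers with policy `i64 2`, whereas vsm3me is non-destructive — its first argument is an ordinary `maskedoff` pass-through and the lowered intrinsic carries no trailing policy immediate. A hypothetical sketch combining them:

#include <riscv_vector.h>

// vsm3me expands the next message words; its first operand is only a
// tail pass-through. vsm3c then consumes them, overwriting `state`,
// with the round group given as an immediate (0 here, as in the tests).
vuint32m1_t sm3_step(vuint32m1_t state, vuint32m1_t dest,
                     vuint32m1_t w0, vuint32m1_t w1, size_t vl) {
  vuint32m1_t wnext = __riscv_vsm3me_vv_u32m1_tu(dest, w0, w1, vl);
  return __riscv_vsm3c_vi_u32m1_tu(state, wnext, 0, vl);
}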
[[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm3me_vv_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vsm3me_vv_u32m8_tu(maskedoff, vs2, vs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsm4k.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsm4k.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsm4k.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm4k_vi_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4k_vi_u32mf2_tu(maskedoff, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4k_vi_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4k_vi_u32m1_tu(maskedoff, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4k_vi_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4k_vi_u32m2_tu(maskedoff, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4k_vi_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4k_vi_u32m4_tu(maskedoff, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4k_vi_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vsm4k_vi_u32m8_tu(maskedoff, vs2, 0, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsm4r.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsm4r.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vsm4r.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm4r_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vv_u32mf2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm4r_vs_u32mf2_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32mf2_u32mf2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4r_vs_u32mf2_u32m1_tu(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32mf2_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vs_u32mf2_u32m2_tu(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32mf2_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vsm4r.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32mf2_u32m4_tu(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32mf2_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32mf2_u32m8_tu(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32mf2_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4r_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vv_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4r_vs_u32m1_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m1_u32m1_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vs_u32m1_u32m2_tu(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m1_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32m1_u32m4_tu(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m1_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m1_u32m8_tu(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m1_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vv_u32m2_tu(vuint32m2_t vd, 
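vsm4r is tested in two shapes: `_vv`, which keeps `vd` and `vs2` at the same LMUL, and `_vs`, whose doubled type suffix names the `vs2` type first and the `vd` type second (so `_vs_u32mf2_u32m8` reuses one mf2 group of round keys for an m8 destination), which is why the `_vs` tests enumerate LMUL pairs. A hypothetical sketch of key expansion feeding the rounds:

#include <riscv_vector.h>

// Expand one group of SM4 round keys (round-group immediate 0, as in
// the vsm4k tests), then run the rounds they enable; both calls
// overwrite their first operand. `key` also serves as the vsm4k
// pass-through in this sketch.
vuint32m1_t sm4_block(vuint32m1_t state, vuint32m1_t key, size_t vl) {
  vuint32m1_t rk = __riscv_vsm4k_vi_u32m1_tu(key, key, 0, vl);
  return __riscv_vsm4r_vv_u32m1_tu(state, rk, vl);
}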
vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vv_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vs_u32m2_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m2_u32m2_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32m2_u32m4_tu(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m2_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m2_u32m8_tu(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m2_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4r_vv_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m4_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32m4_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m4_u32m4_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m4_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m4_u32m8_tu(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4r_vs_u32m4_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vsm4r_vv_u32m8_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m8_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], 
<vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vsm4r.vs.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vsm4r_vs_u32m8_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vsm4r_vs_u32m8_u32m8_tu(vd, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vwsll.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vwsll.c
@@ -0,0 +1,1215 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vwsll_vv_u16mf4_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vwsll.nxv1i16.nxv1i8.nxv1i8.i64(<vscale x 1 x i16> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vwsll_vv_u16mf4_tu(vuint16mf4_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) {
+  return __riscv_vwsll_vv_u16mf4_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vwsll_vx_u16mf4_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vwsll.nxv1i16.nxv1i8.i64.i64(<vscale x 1 x i16> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vwsll_vx_u16mf4_tu(vuint16mf4_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vwsll_vx_u16mf4_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vwsll_vv_u16mf2_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vwsll.nxv2i16.nxv2i8.nxv2i8.i64(<vscale x 2 x i16> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], <vscale x 2 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vwsll_vv_u16mf2_tu(vuint16mf2_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) {
+  return __riscv_vwsll_vv_u16mf2_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vwsll_vx_u16mf2_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vwsll.nxv2i16.nxv2i8.i64.i64(<vscale x 2 x i16> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vwsll_vx_u16mf2_tu(vuint16mf2_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) {
+  return
__riscv_vwsll_vx_u16mf2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i16.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vv_u16m1_tu(vuint16m1_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i16.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vx_u16m1_tu(vuint16m1_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m1_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i16.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vv_u16m2_tu(vuint16m2_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i16.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vx_u16m2_tu(vuint16m2_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i16.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vv_u16m4_tu(vuint16m4_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i16.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vx_u16m4_tu(vuint16m4_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vwsll.nxv32i16.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vv_u16m8_tu(vuint16m8_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv32i16.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vx_u16m8_tu(vuint16m8_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i32.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32mf2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i32.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vx_u32mf2_tu(vuint32mf2_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32mf2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i32.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vv_u32m1_tu(vuint32m1_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i32.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vx_u32m1_tu(vuint32m1_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m1_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i32.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vv_u32m2_tu(vuint32m2_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m2_tu(maskedoff, 
vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i32.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vx_u32m2_tu(vuint32m2_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i32.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vv_u32m4_tu(vuint32m4_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i32.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vx_u32m4_tu(vuint32m4_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i32.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vv_u32m8_tu(vuint32m8_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i32.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vx_u32m8_tu(vuint32m8_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m8_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i64.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vv_u64m1_tu(vuint64m1_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m1_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vwsll.nxv1i64.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vx_u64m1_tu(vuint64m1_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m1_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i64.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vv_u64m2_tu(vuint64m2_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m2_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i64.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vx_u64m2_tu(vuint64m2_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m2_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i64.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vv_u64m4_tu(vuint64m4_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m4_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i64.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vx_u64m4_tu(vuint64m4_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m4_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i64.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vv_u64m8_tu(vuint64m8_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m8_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i64.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vx_u64m8_tu(vuint64m8_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m8_tu(maskedoff, vs2, rs1, vl); +} + 
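The unmasked `_tu` tests above and the masked `_tum`, `_tumu`, and `_mu` groups that follow differ only in whether a mask operand is passed and in the trailing policy operand of the lowered call (`i64 2`, `i64 0`, and `i64 1` respectively in the checks). A minimal sketch distilled from the test signatures in this file; the wrapper name `vwsll_policy_sketch` is hypothetical, while the intrinsic names and argument orders are taken verbatim from the tests:

// Sketch only (not part of the patch): the four policy suffixes exercised in
// this file, applied to the same vwsll.vv operation.
vuint16mf4_t vwsll_policy_sketch(vbool64_t mask, vuint16mf4_t maskedoff,
                                 vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) {
  // _tu: unmasked, tail undisturbed; maskedoff supplies the tail elements and
  // the lowered call carries no policy operand.
  vuint16mf4_t r = __riscv_vwsll_vv_u16mf4_tu(maskedoff, vs2, vs1, vl);
  // _tum: masked, tail undisturbed, mask agnostic; lowered with "i64 2".
  r = __riscv_vwsll_vv_u16mf4_tum(mask, r, vs2, vs1, vl);
  // _tumu: masked, tail undisturbed, inactive elements undisturbed; "i64 0".
  r = __riscv_vwsll_vv_u16mf4_tumu(mask, r, vs2, vs1, vl);
  // _mu: masked, inactive elements undisturbed, tail agnostic; "i64 1".
  r = __riscv_vwsll_vv_u16mf4_mu(mask, r, vs2, vs1, vl);
  return r;
}
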
+// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vv_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16mf4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vx_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16mf4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vv_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16mf2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vx_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16mf2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vv_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m1_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vx_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t 
vl) { + return __riscv_vwsll_vx_u16m1_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vv_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vx_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vv_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vx_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vv_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vx_u16m8_tum(vbool2_t mask, 
vuint16m8_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vv_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32mf2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vx_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32mf2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vv_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m1_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m1_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vv_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vx_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vv_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vx_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vv_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vx_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m1_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.i64.i64( 
[[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m1_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m2_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m2_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m4_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m4_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m8_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m8_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vv_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16mf4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vx_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16mf4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vv_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16mf2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vx_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16mf2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vv_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m1_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef 
[[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vx_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m1_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vv_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vx_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vv_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vx_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vv_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m8_tumu +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vx_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vv_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32mf2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vx_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32mf2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m1_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m1_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vv_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m2_tumu(mask, 
maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vx_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vv_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vx_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vv_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vx_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, 
vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m1_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m1_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m2_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m2_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m4_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m4_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret 
[[TMP0]] +// +vuint64m8_t test_vwsll_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m8_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m8_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vv_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16mf4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vx_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16mf4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vv_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16mf2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vx_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16mf2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], 
[[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vv_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vx_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vv_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vx_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vv_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vx_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vv_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u16m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vx_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u16m8_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vv_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32mf2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vx_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32mf2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vv_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vv_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vx_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vv_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vx_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vv_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vwsll_vv_u32m8_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vx_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u32m8_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], 
[[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m1_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m1_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m2_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m2_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vwsll_vv_u64m4_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_vx_u64m4_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m8_mu +// 
CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.nxv8i32.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vwsll_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
+  return __riscv_vwsll_vv_u64m8_mu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vwsll_vx_u64m8_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vwsll_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vwsll_vx_u64m8_mu(mask, maskedoff, vs2, rs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesdf.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesdf.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesdf.c
@@ -0,0 +1,215 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vaesdf_vv_u32mf2_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vaesdf.vv.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vaesdf_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesdf_vv_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vaesdf_vs_u32mf2_u32mf2_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vaesdf.vs.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vaesdf_vs_u32mf2_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesdf_vs_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vaesdf_vs_u32mf2_u32m1_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vaesdf.vs.nxv2i32.nxv1i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vaesdf_vs_u32mf2_u32m1_tu(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) {
+  return 
__riscv_vaesdf_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vs_u32mf2_u32m2_tu(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32mf2_u32m4_tu(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32mf2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32mf2_u32m8_tu(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdf_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdf_vs_u32m1_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vs_u32m1_u32m2_tu(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32m1_u32m4_tu(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m1_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m1_u32m8_tu(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdf_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdf_vs_u32m2_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32m2_u32m4_tu(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m2_u32m8_tu(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdf_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdf_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m4_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdf_vs_u32m4_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdf_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m4_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m4_u32m8_tu(vuint32m8_t 
vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdf_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdf_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdf_vs_u32m8_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdf.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdf_vs_u32m8_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdf_vs_tu(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesdm.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesdm.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesdm.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdm_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesdm_vs_u32mf2_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdm_vs_u32mf2_u32m1_tu(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m2_tu +// 
CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vs_u32mf2_u32m2_tu(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32mf2_u32m4_tu(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32mf2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32mf2_u32m8_tu(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdm_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesdm_vs_u32m1_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vs_u32m1_u32m2_tu(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32m1_u32m4_tu(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m1_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], 
i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m1_u32m8_tu(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesdm_vs_u32m2_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32m2_u32m4_tu(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m2_u32m8_tu(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdm_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m4_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesdm_vs_u32m4_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m4_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m4_u32m8_tu(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vaesdm_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdm_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesdm_vs_u32m8_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesdm.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesdm_vs_u32m8_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesdm_vs_tu(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesef.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesef.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesef.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesef_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesef_vs_u32mf2_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesef_vs_u32mf2_u32m1_tu(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vs_u32mf2_u32m2_tu(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32mf2_u32m4_tu(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32mf2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32mf2_u32m8_tu(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesef_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesef_vs_u32m1_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vs_u32m1_u32m2_tu(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32m1_u32m4_tu(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m1_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m1_u32m8_tu(vuint32m8_t vd, vuint32m1_t 
vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesef_vs_u32m2_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32m2_u32m4_tu(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m2_u32m8_tu(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesef_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m4_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesef_vs_u32m4_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m4_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m4_u32m8_tu(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesef_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesef_vs_u32m8_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesef.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesef_vs_u32m8_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesef_vs_tu(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesem.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesem.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesem.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesem_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaesem_vs_u32mf2_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesem_vs_u32mf2_u32m1_tu(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] 
+// +vuint32m2_t test_vaesem_vs_u32mf2_u32m2_tu(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32mf2_u32m4_tu(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32mf2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32mf2_u32m8_tu(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesem_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaesem_vs_u32m1_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vs_u32m1_u32m2_tu(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32m1_u32m4_tu(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m1_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m1_u32m8_tu(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m2_tu +// 
CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaesem_vs_u32m2_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32m2_u32m4_tu(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m2_u32m8_tu(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesem_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m4_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaesem_vs_u32m4_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m4_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m4_u32m8_tu(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesem_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaesem_vs_u32m8_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaesem.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaesem_vs_u32m8_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vaesem_vs_tu(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaeskf1.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaeskf1.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaeskf1.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf1_vi_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaeskf1.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaeskf1_vi_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaeskf1_tu(maskedoff, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf1_vi_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaeskf1.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaeskf1_vi_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vaeskf1_tu(maskedoff, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf1_vi_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaeskf1.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaeskf1_vi_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vaeskf1_tu(maskedoff, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf1_vi_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaeskf1.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaeskf1_vi_u32m4_tu(vuint32m4_t maskedoff, 
vuint32m4_t vs2, size_t vl) { + return __riscv_vaeskf1_tu(maskedoff, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf1_vi_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaeskf1.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vaeskf1_vi_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vaeskf1_tu(maskedoff, vs2, 0, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaeskf2.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaeskf2.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaeskf2.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf2_vi_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaeskf2.nxv1i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vaeskf2_vi_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vaeskf2_tu(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf2_vi_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaeskf2.nxv2i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vaeskf2_vi_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vaeskf2_tu(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf2_vi_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaeskf2.nxv4i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vaeskf2_vi_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vaeskf2_tu(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf2_vi_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaeskf2.nxv8i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vaeskf2_vi_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vaeskf2_tu(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vaeskf2_vi_u32m8_tu +// 
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaeskf2.nxv16i32.i64.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], i64 0, i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaeskf2_vi_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vaeskf2_tu(vd, vs2, 0, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesz.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesz.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaesz.c
@@ -0,0 +1,165 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vaesz_vs_u32mf2_u32mf2_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vaesz.vs.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vaesz_vs_u32mf2_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vaesz_vs_u32mf2_u32m1_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vaesz.vs.nxv2i32.nxv1i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vaesz_vs_u32mf2_u32m1_tu(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vaesz_vs_u32mf2_u32m2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vaesz.vs.nxv4i32.nxv1i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vaesz_vs_u32mf2_u32m2_tu(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaesz_vs_u32mf2_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaesz.vs.nxv8i32.nxv1i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaesz_vs_u32mf2_u32m4_tu(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaesz_vs_u32mf2_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesz.vs.nxv16i32.nxv1i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesz_vs_u32mf2_u32m8_tu(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vaesz_vs_u32m1_u32m1_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vaesz.vs.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vaesz_vs_u32m1_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vaesz_vs_u32m1_u32m2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vaesz.vs.nxv4i32.nxv2i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vaesz_vs_u32m1_u32m2_tu(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaesz_vs_u32m1_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaesz.vs.nxv8i32.nxv2i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaesz_vs_u32m1_u32m4_tu(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaesz_vs_u32m1_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesz.vs.nxv16i32.nxv2i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesz_vs_u32m1_u32m8_tu(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vaesz_vs_u32m2_u32m2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vaesz.vs.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vaesz_vs_u32m2_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaesz_vs_u32m2_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaesz.vs.nxv8i32.nxv4i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaesz_vs_u32m2_u32m4_tu(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaesz_vs_u32m2_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesz.vs.nxv16i32.nxv4i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesz_vs_u32m2_u32m8_tu(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vaesz_vs_u32m4_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vaesz.vs.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vaesz_vs_u32m4_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaesz_vs_u32m4_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesz.vs.nxv16i32.nxv8i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesz_vs_u32m4_u32m8_tu(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vaesz_vs_u32m8_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vaesz.vs.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vaesz_vs_u32m8_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vaesz_tu(vd, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vandn.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vandn.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vandn.c
@@ -0,0 +1,1775 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vandn_vv_u8mf8_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vandn.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vandn_vv_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) {
+  return __riscv_vandn_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vandn_vx_u8mf8_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vandn.nxv1i8.i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], i8 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vandn_vx_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, uint8_t rs1, size_t vl) {
+  return __riscv_vandn_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vandn_vv_u8mf4_tu
+// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vv_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vx_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vv_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vx_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vv_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vx_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vv_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + 
return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vx_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vv_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vx_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vv_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv64i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vx_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vv_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vx_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vv_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vx_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vv_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vx_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vv_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vx_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vv_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vx_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vv_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv32i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vx_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vx_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, 
vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vx_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vv_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vx_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vv_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vx_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vv_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv16i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vx_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vv_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vx_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vv_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vx_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], 
i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vv_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vx_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vv_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vx_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vv_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vx_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// 
+vuint8m4_t test_vandn_vv_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vx_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vv_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vx_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vv_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vx_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t 
test_vandn_vv_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vx_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vv_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vx_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vv_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vx_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// 
+vuint16m4_t test_vandn_vv_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vx_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vv_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vx_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vv_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vx_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret 
[[TMP0]] +// +vuint32m1_t test_vandn_vv_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vv_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vx_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vv_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vx_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret 
[[TMP0]] +// +vuint32m8_t test_vandn_vv_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vx_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// 
+vuint64m4_t test_vandn_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vv_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vx_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t 
test_vandn_vv_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vx_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vv_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vx_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vv_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vx_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t 
test_vandn_vv_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vx_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vv_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vx_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vv_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vx_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t 
test_vandn_vv_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vx_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vv_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vx_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vv_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vx_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vv_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vx_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vv_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vx_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vv_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vx_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 
[[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vv_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vx_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vv_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vx_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], 
[[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vv_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vx_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vv_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vx_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], 
[[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], 
[[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vv_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vandn_vx_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vv_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vandn_vx_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vv_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vandn_vx_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vv_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vandn_vx_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vv_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vandn_vx_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vv_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vandn_vx_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t 
test_vandn_vv_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i8 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[VS2]], i8 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vandn_vx_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, uint8_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vv_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vandn_vx_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vv_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vandn_vx_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t 
test_vandn_vv_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vandn_vx_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vv_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vandn_vx_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vv_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vandn_vx_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t 
test_vandn_vv_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i16 noundef zeroext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[VS2]], i16 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vandn_vx_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, uint16_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vv_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vandn_vx_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vv_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vandn_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t 
test_vandn_vv_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vandn_vx_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vv_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vandn_vx_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vv_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i32 noundef signext [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[VS2]], i32 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vandn_vx_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, uint32_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t 
test_vandn_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vandn_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vandn_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vx_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vandn_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vandn_vv_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vandn.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vandn_vv_u64m8_mu(vbool8_t mask, 
vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vandn_mu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vandn_vx_u64m8_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vandn.mask.nxv8i64.i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vandn_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vandn_mu(mask, maskedoff, vs2, rs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vbrev.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vbrev.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vbrev.c
@@ -0,0 +1,895 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vbrev_v_u8mf8_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vbrev.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vbrev_v_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vbrev_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vbrev_v_u8mf4_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vbrev.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vbrev_v_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vbrev_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vbrev_v_u8mf2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vbrev.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vbrev_v_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vbrev_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vbrev_v_u8m1_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vbrev.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vbrev_v_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vbrev_tu(maskedoff, vs2, vl);
__riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev_v_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev_v_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev_v_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev_v_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev_v_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev_v_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev_v_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vbrev.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev_v_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev_v_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev_v_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev_v_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev_v_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev_v_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev_v_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev_v_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev_v_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: 
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vbrev_v_u64m2_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vbrev.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vbrev_v_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) {
+  return __riscv_vbrev_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vbrev_v_u64m4_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vbrev.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vbrev_v_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vbrev_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vbrev_v_u64m8_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vbrev.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vbrev_v_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vbrev_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vbrev_v_u8mf8_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vbrev.mask.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vbrev_v_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vbrev_v_u8mf4_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vbrev.mask.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vbrev_v_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vbrev_v_u8mf2_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vbrev.mask.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vbrev_v_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vbrev_v_u8m1_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vbrev.mask.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vbrev_v_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vbrev_v_u8m2_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vbrev.mask.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF]], <vscale x 16 x i8> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vbrev_v_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vbrev_v_u8m4_tum
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vbrev.mask.nxv32i8.i64(<vscale x 32 x i8> [[MASKEDOFF]], <vscale x 32 x i8> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vbrev_v_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vbrev_v_u8m8_tum
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vbrev.mask.nxv64i8.i64(<vscale x 64 x i8> [[MASKEDOFF]], <vscale x 64 x i8> [[VS2]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vbrev_v_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vbrev_v_u16mf4_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vbrev.mask.nxv1i16.i64(<vscale x 1 x i16> [[MASKEDOFF]], <vscale x 1 x i16> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vbrev_v_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vbrev_v_u16mf2_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vbrev.mask.nxv2i16.i64(<vscale x 2 x i16> [[MASKEDOFF]], <vscale x 2 x i16> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vbrev_v_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vbrev_v_u16m1_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[MASKEDOFF:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vbrev.mask.nxv4i16.i64(<vscale x 4 x i16> [[MASKEDOFF]], <vscale x 4 x i16> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vbrev_v_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vbrev_v_u16m2_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[MASKEDOFF:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vbrev.mask.nxv8i16.i64(<vscale x 8 x i16> [[MASKEDOFF]], <vscale x 8 x i16> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vbrev_v_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vbrev_v_u16m4_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[MASKEDOFF:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vbrev.mask.nxv16i16.i64(<vscale x 16 x i16> [[MASKEDOFF]], <vscale x 16 x i16> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vbrev_v_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vbrev_v_u16m8_tum
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[MASKEDOFF:%.*]], <vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vbrev.mask.nxv32i16.i64(<vscale x 32 x i16> [[MASKEDOFF]], <vscale x 32 x i16> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vbrev_v_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vbrev_v_u32mf2_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[MASKEDOFF:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vbrev.mask.nxv1i32.i64(<vscale x 1 x i32> [[MASKEDOFF]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vbrev_v_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vbrev_v_u32m1_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vbrev.mask.nxv2i32.i64(<vscale x 2 x i32> [[MASKEDOFF]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vbrev_v_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vbrev_v_u32m2_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vbrev.mask.nxv4i32.i64(<vscale x 4 x i32> [[MASKEDOFF]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vbrev_v_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vbrev_v_u32m4_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vbrev.mask.nxv8i32.i64(<vscale x 8 x i32> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vbrev_v_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vbrev_v_u32m8_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vbrev.mask.nxv16i32.i64(<vscale x 16 x i32> [[MASKEDOFF]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vbrev_v_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vbrev_v_u64m1_tum
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vbrev.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vbrev_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vbrev_v_u64m2_tum
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vbrev.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vbrev_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vbrev_v_u64m4_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vbrev.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vbrev_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vbrev_v_u64m8_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vbrev.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vbrev_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vbrev_tum(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vbrev_v_u8mf8_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vbrev.mask.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vbrev_v_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vbrev_v_u8mf4_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vbrev.mask.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vbrev_v_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vbrev_v_u8mf2_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vbrev.mask.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vbrev_v_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vbrev_v_u8m1_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vbrev.mask.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vbrev_v_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vbrev_v_u8m2_tumu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vbrev.mask.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF]], <vscale x 16 x i8> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vbrev_v_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vbrev_v_u8m4_tumu
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vbrev.mask.nxv32i8.i64(<vscale x 32 x i8> [[MASKEDOFF]], <vscale x 32 x i8> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vbrev_v_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vbrev_v_u8m8_tumu
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vbrev.mask.nxv64i8.i64(<vscale x 64 x i8> [[MASKEDOFF]], <vscale x 64 x i8> [[VS2]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vbrev_v_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vbrev_v_u16mf4_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vbrev.mask.nxv1i16.i64(<vscale x 1 x i16> [[MASKEDOFF]], <vscale x 1 x i16> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vbrev_v_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vbrev_v_u16mf2_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vbrev.mask.nxv2i16.i64(<vscale x 2 x i16> [[MASKEDOFF]], <vscale x 2 x i16> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vbrev_v_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vbrev_v_u16m1_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[MASKEDOFF:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vbrev.mask.nxv4i16.i64(<vscale x 4 x i16> [[MASKEDOFF]], <vscale x 4 x i16> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vbrev_v_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vbrev_v_u16m2_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[MASKEDOFF:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vbrev.mask.nxv8i16.i64(<vscale x 8 x i16> [[MASKEDOFF]], <vscale x 8 x i16> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vbrev_v_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vbrev_v_u16m4_tumu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[MASKEDOFF:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vbrev.mask.nxv16i16.i64(<vscale x 16 x i16> [[MASKEDOFF]], <vscale x 16 x i16> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vbrev_v_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vbrev_v_u16m8_tumu
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[MASKEDOFF:%.*]], <vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vbrev.mask.nxv32i16.i64(<vscale x 32 x i16> [[MASKEDOFF]], <vscale x 32 x i16> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vbrev_v_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vbrev_v_u32mf2_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[MASKEDOFF:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vbrev.mask.nxv1i32.i64(<vscale x 1 x i32> [[MASKEDOFF]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vbrev_v_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vbrev_v_u32m1_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vbrev.mask.nxv2i32.i64(<vscale x 2 x i32> [[MASKEDOFF]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vbrev_v_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vbrev_v_u32m2_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vbrev.mask.nxv4i32.i64(<vscale x 4 x i32> [[MASKEDOFF]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vbrev_v_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vbrev_v_u32m4_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vbrev.mask.nxv8i32.i64(<vscale x 8 x i32> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vbrev_v_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vbrev_v_u32m8_tumu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vbrev.mask.nxv16i32.i64(<vscale x 16 x i32> [[MASKEDOFF]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vbrev_v_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vbrev_v_u64m1_tumu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vbrev.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vbrev_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vbrev_v_u64m2_tumu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vbrev.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vbrev_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vbrev_v_u64m4_tumu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vbrev.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vbrev_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vbrev_v_u64m8_tumu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vbrev.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 0)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vbrev_v_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vbrev_tumu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vbrev_v_u8mf8_mu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vbrev.mask.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vbrev_v_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vbrev_v_u8mf4_mu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vbrev.mask.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vbrev_v_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vbrev_v_u8mf2_mu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vbrev.mask.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vbrev_v_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vbrev_v_u8m1_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vbrev.mask.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vbrev_v_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vbrev_v_u8m2_mu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vbrev.mask.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF]], <vscale x 16 x i8> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vbrev_v_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i8> @test_vbrev_v_u8m4_mu
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vbrev.mask.nxv32i8.i64(<vscale x 32 x i8> [[MASKEDOFF]], <vscale x 32 x i8> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vbrev_v_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 64 x i8> @test_vbrev_v_u8m8_mu
+// CHECK-RV64-SAME: (<vscale x 64 x i1> [[MASK:%.*]], <vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vbrev.mask.nxv64i8.i64(<vscale x 64 x i8> [[MASKEDOFF]], <vscale x 64 x i8> [[VS2]], <vscale x 64 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vbrev_v_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i16> @test_vbrev_v_u16mf4_mu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vbrev.mask.nxv1i16.i64(<vscale x 1 x i16> [[MASKEDOFF]], <vscale x 1 x i16> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vbrev_v_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i16> @test_vbrev_v_u16mf2_mu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vbrev.mask.nxv2i16.i64(<vscale x 2 x i16> [[MASKEDOFF]], <vscale x 2 x i16> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vbrev_v_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i16> @test_vbrev_v_u16m1_mu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i16> [[MASKEDOFF:%.*]], <vscale x 4 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vbrev.mask.nxv4i16.i64(<vscale x 4 x i16> [[MASKEDOFF]], <vscale x 4 x i16> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
+//
+vuint16m1_t test_vbrev_v_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i16> @test_vbrev_v_u16m2_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i16> [[MASKEDOFF:%.*]], <vscale x 8 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i16> @llvm.riscv.vbrev.mask.nxv8i16.i64(<vscale x 8 x i16> [[MASKEDOFF]], <vscale x 8 x i16> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i16> [[TMP0]]
+//
+vuint16m2_t test_vbrev_v_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i16> @test_vbrev_v_u16m4_mu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i16> [[MASKEDOFF:%.*]], <vscale x 16 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i16> @llvm.riscv.vbrev.mask.nxv16i16.i64(<vscale x 16 x i16> [[MASKEDOFF]], <vscale x 16 x i16> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i16> [[TMP0]]
+//
+vuint16m4_t test_vbrev_v_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 32 x i16> @test_vbrev_v_u16m8_mu
+// CHECK-RV64-SAME: (<vscale x 32 x i1> [[MASK:%.*]], <vscale x 32 x i16> [[MASKEDOFF:%.*]], <vscale x 32 x i16> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 32 x i16> @llvm.riscv.vbrev.mask.nxv32i16.i64(<vscale x 32 x i16> [[MASKEDOFF]], <vscale x 32 x i16> [[VS2]], <vscale x 32 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 32 x i16> [[TMP0]]
+//
+vuint16m8_t test_vbrev_v_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vbrev_v_u32mf2_mu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i32> [[MASKEDOFF:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vbrev.mask.nxv1i32.i64(<vscale x 1 x i32> [[MASKEDOFF]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vbrev_v_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vbrev_v_u32m1_mu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i32> [[MASKEDOFF:%.*]], <vscale x 2 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vbrev.mask.nxv2i32.i64(<vscale x 2 x i32> [[MASKEDOFF]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vbrev_v_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vbrev_v_u32m2_mu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i32> [[MASKEDOFF:%.*]], <vscale x 4 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vbrev.mask.nxv4i32.i64(<vscale x 4 x i32> [[MASKEDOFF]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vbrev_v_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vbrev_v_u32m4_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i32> [[MASKEDOFF:%.*]], <vscale x 8 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vbrev.mask.nxv8i32.i64(<vscale x 8 x i32> [[MASKEDOFF]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vbrev_v_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vbrev_v_u32m8_mu
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i32> [[MASKEDOFF:%.*]], <vscale x 16 x i32> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vbrev.mask.nxv16i32.i64(<vscale x 16 x i32> [[MASKEDOFF]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vbrev_v_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vbrev_v_u64m1_mu
+// CHECK-RV64-SAME: (<vscale x 1 x i1> [[MASK:%.*]], <vscale x 1 x i64> [[MASKEDOFF:%.*]], <vscale x 1 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vbrev.mask.nxv1i64.i64(<vscale x 1 x i64> [[MASKEDOFF]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vbrev_v_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vbrev_v_u64m2_mu
+// CHECK-RV64-SAME: (<vscale x 2 x i1> [[MASK:%.*]], <vscale x 2 x i64> [[MASKEDOFF:%.*]], <vscale x 2 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vbrev.mask.nxv2i64.i64(<vscale x 2 x i64> [[MASKEDOFF]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vbrev_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vbrev_v_u64m4_mu
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i64> [[MASKEDOFF:%.*]], <vscale x 4 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vbrev.mask.nxv4i64.i64(<vscale x 4 x i64> [[MASKEDOFF]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vbrev_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vbrev_v_u64m8_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vbrev.mask.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vbrev_v_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vbrev_mu(mask, maskedoff, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vbrev8.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vbrev8.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vbrev8.c
@@ -0,0 +1,895 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN: -target-feature +experimental-zvbb \
+// RUN: -target-feature +experimental-zvbc \
+// RUN: -target-feature +experimental-zvkg \
+// RUN: -target-feature +experimental-zvkned \
+// RUN: -target-feature +experimental-zvknhb \
+// RUN: -target-feature +experimental-zvksed \
+// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN: FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vbrev8_v_u8mf8_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vbrev8.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vbrev8_v_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vbrev8_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vbrev8_v_u8mf4_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vbrev8.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vbrev8_v_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vbrev8_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vbrev8_v_u8mf2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vbrev8.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vbrev8_v_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vbrev8_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vbrev8_v_u8m1_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vbrev8.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vbrev8_v_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2,
size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev8_v_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev8_v_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev8_v_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev8_v_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev8_v_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev8_v_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev8_v_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev8_v_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev8_v_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev8_v_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev8_v_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev8_v_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev8_v_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev8_v_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev8_v_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return 
__riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev8_v_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev8_v_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev8_v_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev8_v_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev8_v_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev8_v_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev8_v_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return 
__riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev8_v_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev8_v_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev8_v_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev8_v_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev8_v_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev8_v_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 
[[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev8_v_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev8_v_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev8_v_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev8_v_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev8_v_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev8_v_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev8_v_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev8_v_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev8_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev8_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev8_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev8_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev8_v_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev8_v_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return 
__riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev8_v_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev8_v_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev8_v_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev8_v_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev8_v_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev8_v_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], 
[[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev8_v_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev8_v_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev8_v_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev8_v_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev8_v_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev8_v_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev8_v_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m2_tumu +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev8_v_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev8_v_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev8_v_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev8_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev8_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vbrev8_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vbrev8_v_u64m8_tumu(vbool8_t mask, 
vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vbrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vbrev8_v_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vbrev8_v_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vbrev8_v_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vbrev8_v_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vbrev8_v_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vbrev8_v_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv64i8.i64( 
[[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vbrev8_v_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vbrev8_v_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vbrev8_v_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vbrev8_v_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vbrev8_v_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vbrev8_v_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vbrev8_v_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vbrev8_v_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vbrev8_v_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vbrev8_v_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vbrev8_v_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vbrev8_v_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vbrev8_v_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vbrev8_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { 
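+ // Note on the _mu suffix (illustrative comment, not autogenerated; it
+ // assumes the usual RVV policy-bit encoding): the trailing i64 policy
+ // operand of the masked IR calls sets bit 0 for tail-agnostic and bit 1
+ // for mask-agnostic, which is why the CHECK lines in this file show
+ // policy 2 for _tum, 0 for _tumu, and 1 for _mu. Under _mu, masked-off
+ // lanes keep their values from 'maskedoff' while tail elements are
+ // agnostic.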
+ return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m4_mu
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m4_t test_vbrev8_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) {
+ return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vbrev8_v_u64m8_mu
+// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vbrev8.mask.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m8_t test_vbrev8_v_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) {
+ return __riscv_vbrev8_mu(mask, maskedoff, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vclmul.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vclmul.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vclmul.c
@@ -0,0 +1,335 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN: -target-feature +experimental-zvbb \
+// RUN: -target-feature +experimental-zvbc \
+// RUN: -target-feature +experimental-zvkg \
+// RUN: -target-feature +experimental-zvkned \
+// RUN: -target-feature +experimental-zvknhb \
+// RUN: -target-feature +experimental-zvksed \
+// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN: FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m1_tu
+// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m1_t test_vclmul_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+ return __riscv_vclmul_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m1_tu
+// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m1_t test_vclmul_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) {
+ return __riscv_vclmul_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m2_tu
+// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
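+// Reference model for readers (comment only, not part of the generated
+// checks): vclmul forms a carry-less product, combining partial products
+// with XOR instead of addition, and returns its low 64 bits; vclmulh
+// returns the high 64 bits of the same 128-bit product. A scalar sketch,
+// with 'clmul64_low' as a hypothetical helper name:
+//
+//   static inline uint64_t clmul64_low(uint64_t a, uint64_t b) {
+//     uint64_t acc = 0;
+//     for (int i = 0; i < 64; ++i)
+//       if ((b >> i) & 1)
+//         acc ^= a << i;  // XOR in place of add: carries never propagate
+//     return acc;
+//   }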
+vuint64m2_t test_vclmul_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmul_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmul_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmul_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmul_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef 
[[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmul_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmul_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmul_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef 
[[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmul_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmul_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmul_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], 
i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmul_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmul_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmul_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmul_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 
noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmul_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmul_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmul_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vv_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmul_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmul_vx_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmul.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmul_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmul_mu(mask, maskedoff, vs2, rs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vclmulh.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vclmulh.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vclmulh.c @@ -0,0 +1,335 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: 
-target-feature +experimental-zvknhb \
+// RUN: -target-feature +experimental-zvksed \
+// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN: FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m1_tu
+// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m1_t test_vclmulh_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+ return __riscv_vclmulh_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m1_tu
+// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m1_t test_vclmulh_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) {
+ return __riscv_vclmulh_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m2_tu
+// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m2_t test_vclmulh_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+ return __riscv_vclmulh_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m2_tu
+// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m2_t test_vclmulh_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) {
+ return __riscv_vclmulh_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m4_tu
+// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m4_t test_vclmulh_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+ return __riscv_vclmulh_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m4_tu
+// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT: entry:
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT: ret [[TMP0]]
+//
+vuint64m4_t test_vclmulh_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) {
+ return __riscv_vclmulh_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m8_tu
+// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmulh_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmulh_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmulh_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmulh_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmulh_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmulh_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], 
[[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmulh_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmulh_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vclmulh_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclmulh_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vclmulh_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclmulh_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vclmulh_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclmulh_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vclmulh_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vx_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclmulh.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclmulh_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, uint64_t rs1, size_t vl) { + return __riscv_vclmulh_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vclmulh_vv_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], 
<vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmulh.mask.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmulh_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vclmulh_mu(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vclmulh_vx_u64m8_mu
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i64> [[MASKEDOFF:%.*]], <vscale x 8 x i64> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vclmulh.mask.nxv8i64.i64.i64(<vscale x 8 x i64> [[MASKEDOFF]], <vscale x 8 x i64> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 1)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vclmulh_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, uint64_t rs1, size_t vl) {
+  return __riscv_vclmulh_mu(mask, maskedoff, vs2, rs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vclz.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vclz.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vclz.c
@@ -0,0 +1,798 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +experimental-zvbb -disable-O0-optnone -emit-llvm %s -o - | opt -S -passes=mem2reg | FileCheck %s
+
+#include <riscv_vector.h>
+
+// CHECK-LABEL: @test_vclz_v_u8mf8_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vclz.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vclz_v_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vclz_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vclz_v_u8mf4_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vclz.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vclz_v_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vclz_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vclz_v_u8mf2_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vclz.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vclz_v_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vclz_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vclz_v_u8m1_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vclz.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vclz_v_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vclz_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vclz_v_u8m2_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vclz.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vclz_v_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) {
+  return __riscv_vclz_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vclz_v_u8m4_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vclz.nxv32i8.i64(<vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 
[[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vclz_v_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vclz_v_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vclz_v_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vclz_v_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vclz_v_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vclz_v_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vclz_v_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vclz_v_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32mf2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vclz_v_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vclz_v_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t 
test_vclz_v_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vclz_v_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vclz_v_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclz_v_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclz_v_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclz_v_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclz_v_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vclz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vclz_v_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vclz_v_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vclz_v_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i8.i64( 
[[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vclz_v_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vclz_v_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vclz_v_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vclz_v_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vclz_v_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vclz_v_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vclz_v_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vclz_v_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vclz_v_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vclz.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vclz_v_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vclz_v_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vclz_v_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vclz_v_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vclz_v_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vclz_v_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclz_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclz_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclz_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m8_tum( +// CHECK-NEXT: 
entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclz_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vclz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vclz_v_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vclz_v_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vclz_v_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vclz_v_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vclz_v_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vclz_v_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vclz_v_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vclz_v_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: 
@test_vclz_v_u16mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vclz_v_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vclz_v_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vclz_v_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vclz_v_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vclz_v_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vclz_v_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vclz_v_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vclz_v_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vclz_v_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + 
return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vclz_v_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclz_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclz_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclz_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclz_v_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vclz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vclz_v_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vclz_v_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vclz_v_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vclz_v_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, 
vuint8m1_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vclz_v_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vclz_v_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u8m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vclz_v_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vclz_v_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vclz_v_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vclz_v_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vclz_v_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vclz_v_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u16m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vclz_v_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, 
vuint16m8_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vclz_v_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vclz_v_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vclz_v_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vclz_v_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u32m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vclz_v_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vclz_v_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vclz_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vclz_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vclz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vclz_v_u64m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vclz.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vclz_v_u64m8_mu(vbool8_t mask, 
vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) {
+  return __riscv_vclz_mu(mask, maskedoff, vs2, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vcpopv.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vcpopv.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vcpopv.c
@@ -0,0 +1,798 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +experimental-zvbb -disable-O0-optnone -emit-llvm %s -o - | opt -S -passes=mem2reg | FileCheck %s
+
+#include <riscv_vector.h>
+
+// CHECK-LABEL: @test_vcpopv_v_u8mf8_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vcpopv.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vcpopv_v_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vcpopv_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8mf4_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vcpopv.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vcpopv_v_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vcpopv_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8mf2_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vcpopv.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vcpopv_v_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vcpopv_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m1_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vcpopv.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vcpopv_v_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vcpopv_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m2_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vcpopv.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vcpopv_v_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) {
+  return __riscv_vcpopv_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m4_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vcpopv.nxv32i8.i64(<vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vcpopv_v_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) {
+  return __riscv_vcpopv_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u8m8_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vcpopv.nxv64i8.i64(<vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vcpopv_v_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) {
+  return __riscv_vcpopv_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vcpopv_v_u16mf4_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vcpopv.nxv1i16.i64(<vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vcpopv_v_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vcpopv_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: 
@test_vcpopv_v_u16mf2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vcpopv_v_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vcpopv_v_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vcpopv_v_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vcpopv_v_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vcpopv_v_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32mf2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vcpopv_v_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vcpopv_v_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vcpopv_v_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vcpopv_v_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vcpopv_v_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} 
+ +// CHECK-LABEL: @test_vcpopv_v_u64m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vcpopv_v_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vcpopv_v_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vcpopv_v_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vcpopv_v_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vcpopv_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vcpopv_v_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vcpopv_v_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vcpopv_v_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vcpopv_v_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vcpopv_v_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vcpopv.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vcpopv_v_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vcpopv_v_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vcpopv_v_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vcpopv_v_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vcpopv_v_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vcpopv_v_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vcpopv_v_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vcpopv_v_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vcpopv_v_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, 
vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vcpopv_v_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vcpopv_v_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vcpopv_v_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vcpopv_v_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vcpopv_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vcpopv_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vcpopv_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vcpopv_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vcpopv_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vcpopv_v_u8mf8_tumu(vbool64_t 
mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vcpopv_v_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vcpopv_v_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vcpopv_v_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vcpopv_v_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vcpopv_v_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vcpopv_v_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vcpopv_v_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vcpopv_v_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], 
[[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vcpopv_v_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vcpopv_v_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vcpopv_v_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vcpopv_v_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vcpopv_v_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vcpopv_v_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vcpopv_v_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vcpopv_v_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vcpopv_v_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: 
@test_vcpopv_v_u64m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vcpopv_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vcpopv_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vcpopv_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vcpopv_v_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vcpopv_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vcpopv_v_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vcpopv_v_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vcpopv_v_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vcpopv_v_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vcpopv_v_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, 
size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vcpopv_v_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u8m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vcpopv_v_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vcpopv_v_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vcpopv_v_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vcpopv_v_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vcpopv_v_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vcpopv_v_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u16m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vcpopv_v_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// 
+vuint32mf2_t test_vcpopv_v_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vcpopv_v_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vcpopv_v_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vcpopv_v_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u32m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vcpopv_v_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vcpopv_v_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vcpopv_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vcpopv_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vcpopv_v_u64m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vcpopv.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vcpopv_v_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vcpopv_mu(mask, maskedoff, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vctz.c 
b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vctz.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vctz.c
@@ -0,0 +1,798 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +experimental-zvbb -disable-O0-optnone -emit-llvm %s -o - | opt -S -passes=mem2reg | FileCheck %s
+
+#include <riscv_vector.h>
+
+// CHECK-LABEL: @test_vctz_v_u8mf8_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vctz.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vctz_v_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) {
+  return __riscv_vctz_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u8mf4_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vctz.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vctz_v_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) {
+  return __riscv_vctz_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u8mf2_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vctz.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vctz_v_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) {
+  return __riscv_vctz_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u8m1_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vctz.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vctz_v_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) {
+  return __riscv_vctz_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u8m2_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vctz.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vctz_v_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) {
+  return __riscv_vctz_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u8m4_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 32 x i8> @llvm.riscv.vctz.nxv32i8.i64(<vscale x 32 x i8> [[MASKEDOFF:%.*]], <vscale x 32 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 32 x i8> [[TMP0]]
+//
+vuint8m4_t test_vctz_v_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) {
+  return __riscv_vctz_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u8m8_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 64 x i8> @llvm.riscv.vctz.nxv64i8.i64(<vscale x 64 x i8> [[MASKEDOFF:%.*]], <vscale x 64 x i8> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 64 x i8> [[TMP0]]
+//
+vuint8m8_t test_vctz_v_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) {
+  return __riscv_vctz_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u16mf4_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i16> @llvm.riscv.vctz.nxv1i16.i64(<vscale x 1 x i16> [[MASKEDOFF:%.*]], <vscale x 1 x i16> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 1 x i16> [[TMP0]]
+//
+vuint16mf4_t test_vctz_v_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) {
+  return __riscv_vctz_tu(maskedoff, vs2, vl);
+}
+
+// CHECK-LABEL: @test_vctz_v_u16mf2_tu(
+// CHECK-NEXT:  entry:
+// CHECK-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i16> @llvm.riscv.vctz.nxv2i16.i64(<vscale x 2 x i16> [[MASKEDOFF:%.*]], <vscale x 2 x i16> [[VS2:%.*]], i64 [[VL:%.*]])
+// CHECK-NEXT:    ret <vscale x 2 x i16> [[TMP0]]
+//
+vuint16mf2_t test_vctz_v_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) {
+ 
return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vctz_v_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vctz_v_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vctz_v_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vctz_v_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32mf2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vctz_v_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vctz_v_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vctz_v_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vctz_v_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vctz_v_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m1_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vctz_v_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: 
@test_vctz_v_u64m2_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vctz_v_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m4_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vctz_v_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m8_tu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 [[VL:%.*]]) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vctz_v_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vctz_tu(maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vctz_v_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vctz_v_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vctz_v_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vctz_v_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vctz_v_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vctz_v_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], 
[[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vctz_v_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16mf4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vctz_v_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vctz_v_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vctz_v_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vctz_v_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vctz_v_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vctz_v_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32mf2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vctz_v_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vctz_v_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vctz.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vctz_v_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vctz_v_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vctz_v_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m1_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vctz_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m2_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vctz_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m4_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vctz_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m8_tum( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 2) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vctz_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vctz_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vctz_v_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vctz_v_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf2_tumu( +// CHECK-NEXT: 
entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vctz_v_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vctz_v_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vctz_v_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vctz_v_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vctz_v_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16mf4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vctz_v_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vctz_v_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vctz_v_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vctz_v_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// 
CHECK-LABEL: @test_vctz_v_u16m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vctz_v_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vctz_v_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32mf2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vctz_v_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vctz_v_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vctz_v_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vctz_v_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vctz_v_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m1_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vctz_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m2_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vctz_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t 
vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m4_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vctz_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m8_tumu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 0) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vctz_v_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vctz_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vctz_v_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vctz_v_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vctz_v_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vctz_v_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vctz_v_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv32i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vctz_v_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u8m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv64i8.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vctz_v_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { 
+ return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16mf4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vctz_v_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vctz_v_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vctz_v_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vctz_v_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vctz_v_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u16m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv32i16.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vctz_v_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32mf2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vctz_v_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vctz_v_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vctz_v_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, 
vuint32m2_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vctz_v_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u32m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv16i32.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vctz_v_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m1_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv1i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vctz_v_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m2_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv2i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vctz_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m4_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv4i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vctz_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-LABEL: @test_vctz_v_u64m8_mu( +// CHECK-NEXT: entry: +// CHECK-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vctz.mask.nxv8i64.i64( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[MASK:%.*]], i64 [[VL:%.*]], i64 1) +// CHECK-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vctz_v_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vctz_mu(mask, maskedoff, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vghsh.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vghsh.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vghsh.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vghsh_vv_u32mf2_tu +// CHECK-RV64-SAME: ( 
<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vghsh.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vghsh_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) {
+  return __riscv_vghsh_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vghsh_vv_u32m1_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vghsh.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vghsh_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) {
+  return __riscv_vghsh_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vghsh_vv_u32m2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vghsh.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vghsh_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) {
+  return __riscv_vghsh_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vghsh_vv_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vghsh.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vghsh_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
+  return __riscv_vghsh_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vghsh_vv_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vghsh.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vghsh_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) {
+  return __riscv_vghsh_tu(vd, vs2, vs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vgmul.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vgmul.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vgmul.c
@@ -0,0 +1,65 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vgmul_vv_u32mf2_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vgmul.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vgmul_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vgmul_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vgmul_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vgmul.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vgmul_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vgmul_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vgmul_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vgmul.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vgmul_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vgmul_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vgmul_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vgmul.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vgmul_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vgmul_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vgmul_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vgmul.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vgmul_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vgmul_tu(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vrev8.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vrev8.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vrev8.c @@ -0,0 +1,895 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrev8_v_u8mf8_tu(vuint8mf8_t maskedoff, 
vuint8mf8_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrev8_v_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrev8_v_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrev8_v_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrev8_v_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrev8_v_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrev8_v_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrev8_v_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrev8_v_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrev8_v_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrev8_v_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrev8_v_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrev8_v_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrev8_v_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrev8_v_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrev8_v_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + 
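+// NOTE: A hand-written usage sketch, not autogenerated output and not part
+// of this patch's API surface: the _tu overloads exercised in this file
+// take the passthru as their first operand, so destination elements past
+// vl keep the passthru's contents. The function and buffer names below are
+// illustrative assumptions; a strip-mined loop that byte-swaps n 32-bit
+// words with __riscv_vrev8_tu could look like:
+//
+//   void bswap32_buf(uint32_t *buf, size_t n) {
+//     for (size_t i = 0; i < n;) {
+//       size_t vl = __riscv_vsetvl_e32m4(n - i);
+//       vuint32m4_t v = __riscv_vle32_v_u32m4(buf + i, vl);
+//       // v doubles as its own passthru: lanes past vl stay as loaded.
+//       v = __riscv_vrev8_tu(v, v, vl);
+//       __riscv_vse32_v_u32m4(buf + i, v, vl);
+//       i += vl;
+//     }
+//   }
+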
+// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrev8_v_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrev8_v_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrev8_v_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrev8_v_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrev8_v_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrev8_v_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vrev8_tu(maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrev8_v_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrev8_v_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrev8_v_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrev8_v_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrev8_v_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrev8_v_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrev8_v_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrev8_v_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf2_tum +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrev8_v_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrev8_v_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrev8_v_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrev8_v_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrev8_v_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrev8_v_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrev8_v_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, 
size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrev8_v_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrev8_v_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrev8_v_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrev8_v_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrev8_v_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrev8_v_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], 
i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrev8_v_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vrev8_tum(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrev8_v_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrev8_v_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrev8_v_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrev8_v_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrev8_v_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrev8_v_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrev8_v_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrev8_v_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrev8_v_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrev8_v_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrev8_v_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrev8_v_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrev8_v_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, 
maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrev8_v_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrev8_v_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrev8_v_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrev8_v_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrev8_v_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrev8_v_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrev8_v_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrev8_v_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrev8_v_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vrev8_tumu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrev8_v_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrev8_v_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrev8_v_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrev8_v_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: 
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrev8_v_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrev8_v_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrev8_v_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrev8_v_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrev8_v_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrev8_v_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrev8_v_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m4_mu +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrev8_v_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrev8_v_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrev8_v_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrev8_v_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrev8_v_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrev8_v_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrev8_v_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, 
size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrev8_v_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrev8_v_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrev8_v_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrev8_v_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrev8.mask.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrev8_v_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t vl) { + return __riscv_vrev8_mu(mask, maskedoff, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vrol.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vrol.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vrol.c @@ -0,0 +1,1775 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include <riscv_vector.h> + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t
test_vrol_vv_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vx_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vv_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vx_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vv_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vx_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vv_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i8.i64.i64( 
[[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vx_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vv_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vx_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vv_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vx_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vv_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vx_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vv_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vx_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vv_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vx_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vv_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vx_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vv_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m2_tu +// CHECK-RV64-SAME: ( 
[[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vx_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vv_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vx_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vv_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv32i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vx_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vx_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return 
__riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vx_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vv_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vx_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vv_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vx_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t 
test_vrol_vv_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vx_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vrol.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vv_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vx_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vv_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vx_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t 
vs2, size_t rs1, size_t vl) {
+  return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vrol_vv_u8mf2_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], <vscale x 4 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vrol.mask.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], <vscale x 4 x i8> [[VS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vrol_vv_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) {
+  return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vrol_vx_u8mf2_tum
+// CHECK-RV64-SAME: (<vscale x 4 x i1> [[MASK:%.*]], <vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vrol.mask.nxv4i8.i64.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], i64 [[RS1]], <vscale x 4 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t test_vrol_vx_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vrol_vv_u8m1_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], <vscale x 8 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vrol.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], <vscale x 8 x i8> [[VS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vrol_vv_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) {
+  return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i8> @test_vrol_vx_u8m1_tum
+// CHECK-RV64-SAME: (<vscale x 8 x i1> [[MASK:%.*]], <vscale x 8 x i8> [[MASKEDOFF:%.*]], <vscale x 8 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vrol.mask.nxv8i8.i64.i64(<vscale x 8 x i8> [[MASKEDOFF]], <vscale x 8 x i8> [[VS2]], i64 [[RS1]], <vscale x 8 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
+//
+vuint8m1_t test_vrol_vx_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vrol_vv_u8m2_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], <vscale x 16 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vrol.mask.nxv16i8.nxv16i8.i64(<vscale x 16 x i8> [[MASKEDOFF]], <vscale x 16 x i8> [[VS2]], <vscale x 16 x i8> [[VS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vrol_vv_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) {
+  return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i8> @test_vrol_vx_u8m2_tum
+// CHECK-RV64-SAME: (<vscale x 16 x i1> [[MASK:%.*]], <vscale x 16 x i8> [[MASKEDOFF:%.*]], <vscale x 16 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i8> @llvm.riscv.vrol.mask.nxv16i8.i64.i64(<vscale x 16 x i8> [[MASKEDOFF]], <vscale x 16 x i8> [[VS2]], i64 [[RS1]], <vscale x 16 x i1> [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i8> [[TMP0]]
+//
+vuint8m2_t test_vrol_vx_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl);
+}
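+
+// NOTE (reviewer summary, not update_cc_test_checks output): vrol.vv rotates
+// each element of vs2 left by the corresponding element of vs1, while
+// vrol.vx rotates every element by the scalar rs1; only the low log2(SEW)
+// bits of the rotate amount are used. The trailing i64 operand of the masked
+// intrinsic is the policy, assuming LLVM's usual RVV policy encoding
+// (bit 0 = tail agnostic, bit 1 = mask agnostic), which matches the values
+// checked in this file:
+//   _tum  -> 2 (tail undisturbed, mask agnostic)
+//   _tumu -> 0 (tail undisturbed, mask undisturbed)
+//   _mu   -> 1 (tail agnostic, mask undisturbed)
+// The unmasked _tu variants earlier in the file instead pass maskedoff as a
+// passthru operand and carry no policy operand.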
+ +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vv_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vx_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vv_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vx_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vv_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vx_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf2_tum +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vv_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vx_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vv_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vx_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vv_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vx_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vv_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vx_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vv_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vx_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vv_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vx_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vv_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vv_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vx_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vv_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vx_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vv_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vx_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vrol.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vv_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vx_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], 
[[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vv_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vx_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vv_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vx_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vv_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vx_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vv_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vx_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vv_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vx_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vv_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vx_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t 
test_vrol_vv_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vx_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vv_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vx_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vv_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vx_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t 
test_vrol_vv_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vx_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vv_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vx_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vv_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vx_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vv_u32mf2_tumu(vbool64_t 
mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vx_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vv_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vx_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vv_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, 
vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vx_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vv_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vx_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t 
vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vv_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, 
vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vrol_vx_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vv_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vrol_vx_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vv_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vrol_vx_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vv_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m1_mu +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vrol_vx_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vv_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vrol_vx_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vv_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vrol_vx_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vv_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vrol_vx_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vv_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vrol_vx_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vv_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vrol_vx_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vv_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vrol_vx_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vv_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vrol_vx_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vv_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vrol_vx_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vv_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv32i16.i64.i64( 
[[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vrol_vx_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vv_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vrol_vx_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vv_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vrol_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vv_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) 
+// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vrol_vx_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vv_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vrol_vx_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vv_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vrol_vx_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vrol_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t 
test_vrol_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vrol_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vrol_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vv_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vrol_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vrol_vx_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vrol.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vrol_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, 
size_t rs1, size_t vl) {
+  return __riscv_vrol_mu(mask, maskedoff, vs2, rs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vror.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vror.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vror.c
@@ -0,0 +1,1775 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vv_u8mf8_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], <vscale x 1 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.nxv1i8.nxv1i8.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], <vscale x 1 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vror_vv_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) {
+  return __riscv_vror_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i8> @test_vror_vx_u8mf8_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i8> [[MASKEDOFF:%.*]], <vscale x 1 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i8> @llvm.riscv.vror.nxv1i8.i64.i64(<vscale x 1 x i8> [[MASKEDOFF]], <vscale x 1 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i8> [[TMP0]]
+//
+vuint8mf8_t test_vror_vx_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vror_vv_u8mf4_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], <vscale x 2 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vror.nxv2i8.nxv2i8.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], <vscale x 2 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vror_vv_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) {
+  return __riscv_vror_tu(maskedoff, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i8> @test_vror_vx_u8mf4_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i8> [[MASKEDOFF:%.*]], <vscale x 2 x i8> [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i8> @llvm.riscv.vror.nxv2i8.i64.i64(<vscale x 2 x i8> [[MASKEDOFF]], <vscale x 2 x i8> [[VS2]], i64 [[RS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i8> [[TMP0]]
+//
+vuint8mf4_t test_vror_vx_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) {
+  return __riscv_vror_tu(maskedoff, vs2, rs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i8> @test_vror_vv_u8mf2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i8> [[MASKEDOFF:%.*]], <vscale x 4 x i8> [[VS2:%.*]], <vscale x 4 x i8> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i8> @llvm.riscv.vror.nxv4i8.nxv4i8.i64(<vscale x 4 x i8> [[MASKEDOFF]], <vscale x 4 x i8> [[VS2]], <vscale x 4 x i8> [[VS1]], i64 [[VL]])
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i8> [[TMP0]]
+//
+vuint8mf2_t
test_vror_vv_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vx_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vv_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vx_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vv_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vx_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vv_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 
[[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vx_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vv_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vx_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vv_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vx_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vv_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vx_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vv_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vx_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vv_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vx_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vv_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vx_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vv_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m8_tu +// CHECK-RV64-SAME: ( 
[[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv32i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vx_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vx_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vx_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vv_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vx_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return 
__riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vv_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vx_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vv_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vx_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t 
test_vror_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vror_vv_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vror_vx_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vror_vv_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vror_vx_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vv_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vx_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vv_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vror.mask.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vx_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vv_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vx_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vv_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vx_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vv_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], 
i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vx_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vv_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vx_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vv_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vx_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vv_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] 
+// +vuint16m1_t test_vror_vx_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vv_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vx_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vv_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vx_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vv_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vx_u16m8_tum(vbool2_t mask, 
vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vv_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vx_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vv_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vv_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vx_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t 
rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vv_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vx_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vv_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vx_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, 
vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vror_vv_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vror_vv_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vror_vx_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vror_vv_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vror_vx_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vv_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vx_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m1_tumu +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vv_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vx_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vv_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vx_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vv_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vx_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vv_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vx_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vv_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vx_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vv_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vx_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vv_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vx_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vv_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vx_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vv_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vx_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vv_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vx_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vv_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vx_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] 
= call @llvm.riscv.vror.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vv_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vx_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vv_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vx_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vv_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vx_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vror.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vror.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vror_vv_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf8_t test_vror_vx_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vror_vv_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf4_t test_vror_vx_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], 
[[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vv_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8mf2_t test_vror_vx_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vv_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vror_vx_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vv_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vror_vx_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vv_u8m4_mu(vbool2_t 
mask, vuint8m4_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vror_vx_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vv_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, vuint8m8_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u8m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv64i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vror_vx_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vv_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vror_vx_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vv_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + 
return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vror_vx_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vv_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vror_vx_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vv_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vror_vx_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vv_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vror_vx_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vv_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, vuint16m8_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv32i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vror_vx_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vv_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vror_vx_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vv_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m1_mu +// 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vror_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vv_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vror_vx_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vv_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vror_vx_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vv_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef 
[[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vror_vx_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vror_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vror_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: 
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vror_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vv_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vror_vx_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vror.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vror_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t vs2, size_t rs1, size_t vl) { + return __riscv_vror_mu(mask, maskedoff, vs2, rs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsha2ch.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsha2ch.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsha2ch.c @@ -0,0 +1,105 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include <riscv_vector.h> + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsha2ch_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vsha2ch_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ch_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ch.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsha2ch_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vsha2ch_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vsha2ch_vv_u32m2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vsha2ch.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vsha2ch_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) {
+  return __riscv_vsha2ch_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vsha2ch_vv_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vsha2ch.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vsha2ch_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
+  return __riscv_vsha2ch_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vsha2ch_vv_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vsha2ch.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vsha2ch_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) {
+  return __riscv_vsha2ch_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vsha2ch_vv_u64m1_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VD:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vsha2ch.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VD]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vsha2ch_vv_u64m1_tu(vuint64m1_t vd, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vsha2ch_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vsha2ch_vv_u64m2_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VD:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vsha2ch.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VD]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vsha2ch_vv_u64m2_tu(vuint64m2_t vd, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vsha2ch_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vsha2ch_vv_u64m4_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VD:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vsha2ch.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VD]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vsha2ch_vv_u64m4_tu(vuint64m4_t vd, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vsha2ch_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vsha2ch_vv_u64m8_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VD:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vsha2ch.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VD]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vsha2ch_vv_u64m8_tu(vuint64m8_t vd, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vsha2ch_tu(vd, vs2, vs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsha2cl.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsha2cl.c
new file
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsha2cl.c
@@ -0,0 +1,105 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vsha2cl_vv_u32mf2_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vsha2cl.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vsha2cl_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) {
+  return __riscv_vsha2cl_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i32> @test_vsha2cl_vv_u32m1_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i32> [[VD:%.*]], <vscale x 2 x i32> [[VS2:%.*]], <vscale x 2 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vsha2cl.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[VD]], <vscale x 2 x i32> [[VS2]], <vscale x 2 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
+//
+vuint32m1_t test_vsha2cl_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) {
+  return __riscv_vsha2cl_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i32> @test_vsha2cl_vv_u32m2_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i32> [[VD:%.*]], <vscale x 4 x i32> [[VS2:%.*]], <vscale x 4 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i32> @llvm.riscv.vsha2cl.nxv4i32.nxv4i32.i64(<vscale x 4 x i32> [[VD]], <vscale x 4 x i32> [[VS2]], <vscale x 4 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i32> [[TMP0]]
+//
+vuint32m2_t test_vsha2cl_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) {
+  return __riscv_vsha2cl_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i32> @test_vsha2cl_vv_u32m4_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i32> [[VD:%.*]], <vscale x 8 x i32> [[VS2:%.*]], <vscale x 8 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i32> @llvm.riscv.vsha2cl.nxv8i32.nxv8i32.i64(<vscale x 8 x i32> [[VD]], <vscale x 8 x i32> [[VS2]], <vscale x 8 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i32> [[TMP0]]
+//
+vuint32m4_t test_vsha2cl_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) {
+  return __riscv_vsha2cl_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 16 x i32> @test_vsha2cl_vv_u32m8_tu
+// CHECK-RV64-SAME: (<vscale x 16 x i32> [[VD:%.*]], <vscale x 16 x i32> [[VS2:%.*]], <vscale x 16 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 16 x i32> @llvm.riscv.vsha2cl.nxv16i32.nxv16i32.i64(<vscale x 16 x i32> [[VD]], <vscale x 16 x i32> [[VS2]], <vscale x 16 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 16 x i32> [[TMP0]]
+//
+vuint32m8_t test_vsha2cl_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) {
+  return __riscv_vsha2cl_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i64> @test_vsha2cl_vv_u64m1_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i64> [[VD:%.*]], <vscale x 1 x i64> [[VS2:%.*]], <vscale x 1 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vsha2cl.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[VD]], <vscale x 1 x i64> [[VS2]], <vscale x 1 x i64> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
+//
+vuint64m1_t test_vsha2cl_vv_u64m1_tu(vuint64m1_t vd, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) {
+  return __riscv_vsha2cl_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 2 x i64> @test_vsha2cl_vv_u64m2_tu
+// CHECK-RV64-SAME: (<vscale x 2 x i64> [[VD:%.*]], <vscale x 2 x i64> [[VS2:%.*]], <vscale x 2 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i64> @llvm.riscv.vsha2cl.nxv2i64.nxv2i64.i64(<vscale x 2 x i64> [[VD]], <vscale x 2 x i64> [[VS2]], <vscale x 2 x i64> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 2 x i64> [[TMP0]]
+//
+vuint64m2_t test_vsha2cl_vv_u64m2_tu(vuint64m2_t vd, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) {
+  return __riscv_vsha2cl_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 4 x i64> @test_vsha2cl_vv_u64m4_tu
+// CHECK-RV64-SAME: (<vscale x 4 x i64> [[VD:%.*]], <vscale x 4 x i64> [[VS2:%.*]], <vscale x 4 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i64> @llvm.riscv.vsha2cl.nxv4i64.nxv4i64.i64(<vscale x 4 x i64> [[VD]], <vscale x 4 x i64> [[VS2]], <vscale x 4 x i64> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 4 x i64> [[TMP0]]
+//
+vuint64m4_t test_vsha2cl_vv_u64m4_tu(vuint64m4_t vd, vuint64m4_t vs2, vuint64m4_t vs1, size_t vl) {
+  return __riscv_vsha2cl_tu(vd, vs2, vs1, vl);
+}
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 8 x i64> @test_vsha2cl_vv_u64m8_tu
+// CHECK-RV64-SAME: (<vscale x 8 x i64> [[VD:%.*]], <vscale x 8 x i64> [[VS2:%.*]], <vscale x 8 x i64> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i64> @llvm.riscv.vsha2cl.nxv8i64.nxv8i64.i64(<vscale x 8 x i64> [[VD]], <vscale x 8 x i64> [[VS2]], <vscale x 8 x i64> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 8 x i64> [[TMP0]]
+//
+vuint64m8_t test_vsha2cl_vv_u64m8_tu(vuint64m8_t vd, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) {
+  return __riscv_vsha2cl_tu(vd, vs2, vs1, vl);
+}
+
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsha2ms.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsha2ms.c
new file
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsha2ms.c
@@ -0,0 +1,105 @@
+// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \
+// RUN:   -target-feature +experimental-zvbb \
+// RUN:   -target-feature +experimental-zvbc \
+// RUN:   -target-feature +experimental-zvkg \
+// RUN:   -target-feature +experimental-zvkned \
+// RUN:   -target-feature +experimental-zvknhb \
+// RUN:   -target-feature +experimental-zvksed \
+// RUN:   -target-feature +experimental-zvksh -disable-O0-optnone \
+// RUN:   -emit-llvm %s -o - | opt -S -passes=mem2reg | \
+// RUN:   FileCheck --check-prefix=CHECK-RV64 %s
+
+#include <riscv_vector.h>
+
+// CHECK-RV64-LABEL: define dso_local <vscale x 1 x i32> @test_vsha2ms_vv_u32mf2_tu
+// CHECK-RV64-SAME: (<vscale x 1 x i32> [[VD:%.*]], <vscale x 1 x i32> [[VS2:%.*]], <vscale x 1 x i32> [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
+// CHECK-RV64-NEXT:  entry:
+// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i32> @llvm.riscv.vsha2ms.nxv1i32.nxv1i32.i64(<vscale x 1 x i32> [[VD]], <vscale x 1 x i32> [[VS2]], <vscale x 1 x i32> [[VS1]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT:    ret <vscale x 1 x i32> [[TMP0]]
+//
+vuint32mf2_t test_vsha2ms_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t
vl) { + return __riscv_vsha2ms_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsha2ms_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vsha2ms_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsha2ms_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vsha2ms_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsha2ms_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vsha2ms_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsha2ms_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vsha2ms_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u64m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv1i64.nxv1i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vsha2ms_vv_u64m1_tu(vuint64m1_t vd, vuint64m1_t vs2, vuint64m1_t vs1, size_t vl) { + return __riscv_vsha2ms_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u64m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv2i64.nxv2i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vsha2ms_vv_u64m2_tu(vuint64m2_t vd, vuint64m2_t vs2, vuint64m2_t vs1, size_t vl) { + return __riscv_vsha2ms_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u64m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv4i64.nxv4i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vsha2ms_vv_u64m4_tu(vuint64m4_t vd, vuint64m4_t vs2, vuint64m4_t 
vs1, size_t vl) { + return __riscv_vsha2ms_tu(vd, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsha2ms_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsha2ms.nxv8i64.nxv8i64.i64( [[VD]], [[VS2]], [[VS1]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vsha2ms_vv_u64m8_tu(vuint64m8_t vd, vuint64m8_t vs2, vuint64m8_t vs1, size_t vl) { + return __riscv_vsha2ms_tu(vd, vs2, vs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsm3c.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsm3c.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsm3c.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsm3c_vi_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3c.nxv1i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm3c_vi_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm3c_tu(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3c_vi_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3c.nxv2i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm3c_vi_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm3c_tu(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3c_vi_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3c.nxv4i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm3c_vi_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm3c_tu(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3c_vi_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3c.nxv8i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm3c_vi_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm3c_tu(vd, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3c_vi_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], 
[[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3c.nxv16i32.i64.i64( [[VD]], [[VS2]], i64 0, i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm3c_vi_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vsm3c_tu(vd, vs2, 0, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsm3me.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsm3me.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsm3me.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm3me_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vsm3me_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm3me_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vsm3me_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm3me_vv_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vsm3me_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm3me_vv_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vsm3me_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm3me_vv_u32m8_tu 
+// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm3me.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm3me_vv_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, vuint32m8_t vs1, size_t vl) { + return __riscv_vsm3me_tu(maskedoff, vs2, vs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsm4k.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsm4k.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsm4k.c @@ -0,0 +1,65 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm4k_vi_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4k_tu(maskedoff, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4k_vi_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4k_tu(maskedoff, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4k_vi_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4k_tu(maskedoff, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4k_vi_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4k_tu(maskedoff, vs2, 0, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4k_vi_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4k.nxv16i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 0, i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4k_vi_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t vs2, size_t vl) { + return __riscv_vsm4k_tu(maskedoff, vs2, 0, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsm4r.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsm4r.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vsm4r.c @@ -0,0 +1,215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm4r_vv_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32mf2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv1i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vsm4r_vs_u32mf2_u32mf2_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv2i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4r_vs_u32mf2_u32m1_tu(vuint32m1_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv4i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vs_u32mf2_u32m2_tu(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv1i32.i64( [[VD]], [[VS2]], i64 
[[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32mf2_u32m4_tu(vuint32m4_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32mf2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv1i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32mf2_u32m8_tu(vuint32m8_t vd, vuint32mf2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4r_vv_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m1_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv2i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vsm4r_vs_u32m1_u32m1_tu(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv4i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vs_u32m1_u32m2_tu(vuint32m2_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32m1_u32m4_tu(vuint32m4_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m1_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv2i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m1_u32m8_tu(vuint32m8_t vd, vuint32m1_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vv_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m2_u32m2_tu +// 
CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv4i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vsm4r_vs_u32m2_u32m2_tu(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m2_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32m2_u32m4_tu(vuint32m4_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m2_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv4i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m2_u32m8_tu(vuint32m8_t vd, vuint32m2_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vv_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4r_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m4_u32m4_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv8i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vsm4r_vs_u32m4_u32m4_tu(vuint32m4_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m4_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv8i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vs_u32m4_u32m8_tu(vuint32m8_t vd, vuint32m4_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vv.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vsm4r_vv_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vsm4r_vv_tu(vd, vs2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vsm4r_vs_u32m8_u32m8_tu +// CHECK-RV64-SAME: ( [[VD:%.*]], [[VS2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vsm4r.vs.nxv16i32.nxv16i32.i64( [[VD]], [[VS2]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] 
+// +vuint32m8_t test_vsm4r_vs_u32m8_u32m8_tu(vuint32m8_t vd, vuint32m8_t vs2, size_t vl) { + return __riscv_vsm4r_vs_tu(vd, vs2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vwsll.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vwsll.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vwsll.c @@ -0,0 +1,1215 @@ +// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py UTC_ARGS: --version 2 +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +v -target-feature +zvl512b \ +// RUN: -target-feature +experimental-zvbb \ +// RUN: -target-feature +experimental-zvbc \ +// RUN: -target-feature +experimental-zvkg \ +// RUN: -target-feature +experimental-zvkned \ +// RUN: -target-feature +experimental-zvknhb \ +// RUN: -target-feature +experimental-zvksed \ +// RUN: -target-feature +experimental-zvksh -disable-O0-optnone \ +// RUN: -emit-llvm %s -o - | opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i16.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vv_u16mf4_tu(vuint16mf4_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i16.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vx_u16mf4_tu(vuint16mf4_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i16.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vv_u16mf2_tu(vuint16mf2_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i16.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vx_u16mf2_tu(vuint16mf2_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vwsll.nxv4i16.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vv_u16m1_tu(vuint16m1_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i16.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vx_u16m1_tu(vuint16m1_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i16.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vv_u16m2_tu(vuint16m2_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i16.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vx_u16m2_tu(vuint16m2_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i16.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vv_u16m4_tu(vuint16m4_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i16.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vx_u16m4_tu(vuint16m4_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv32i16.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vv_u16m8_tu(vuint16m8_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m8_tu +// 
CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv32i16.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vx_u16m8_tu(vuint16m8_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i32.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32mf2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i32.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vx_u32mf2_tu(vuint32mf2_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i32.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vv_u32m1_tu(vuint32m1_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i32.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vx_u32m1_tu(vuint32m1_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i32.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vv_u32m2_tu(vuint32m2_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i32.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t 
test_vwsll_vx_u32m2_tu(vuint32m2_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i32.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vv_u32m4_tu(vuint32m4_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i32.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vx_u32m4_tu(vuint32m4_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i32.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vv_u32m8_tu(vuint32m8_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv16i32.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vx_u32m8_tu(vuint32m8_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i64.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vv_u64m1_tu(vuint64m1_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m1_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv1i64.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vx_u64m1_tu(vuint64m1_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i64.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vv_u64m2_tu(vuint64m2_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m2_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv2i64.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vx_u64m2_tu(vuint64m2_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i64.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vv_u64m4_tu(vuint64m4_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m4_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv4i64.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vx_u64m4_tu(vuint64m4_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i64.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vv_u64m8_tu(vuint64m8_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m8_tu +// CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.nxv8i64.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vx_u64m8_tu(vuint64m8_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tu(maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vv_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return 
__riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vx_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vv_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vx_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vv_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vx_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vv_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t 
vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vx_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vv_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vx_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vv_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vx_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vv_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t 
vs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32mf2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vx_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vv_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vv_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vx_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vv_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, 
vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vx_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vv_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vx_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m1_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vv_u64m2_tum(vbool32_t mask, 
vuint64m2_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m2_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m4_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m8_tum +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tum(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t 
test_vwsll_vv_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vx_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vv_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vx_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vv_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vx_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vv_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vx_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vv_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vx_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vv_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vx_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 
[[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vv_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32mf2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vx_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vv_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vx_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.nxv8i16.i64( 
[[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vv_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vx_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vv_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vx_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m1_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m2_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m4_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m8_tumu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_tumu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vv_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint8mf8_t vs2, vuint8mf8_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i16.nxv1i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf4_t test_vwsll_vx_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint8mf8_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vv_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint8mf4_t vs2, vuint8mf4_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i16.nxv2i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16mf2_t test_vwsll_vx_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint8mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vv_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint8mf2_t vs2, vuint8mf2_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i16.nxv4i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vwsll_vx_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint8mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vv_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint8m1_t vs2, vuint8m1_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i16.nxv8i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vwsll_vx_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint8m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vv_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint8m2_t vs2, vuint8m2_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i16.nxv16i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vwsll_vx_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint8m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vv_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint8m4_t vs2, vuint8m4_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u16m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv32i16.nxv32i8.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vwsll_vx_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint8m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vv_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint16mf4_t vs2, vuint16mf4_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32mf2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i32.nxv1i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32mf2_t test_vwsll_vx_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint16mf4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vv_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint16mf2_t vs2, vuint16mf2_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i32.nxv2i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vwsll_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint16mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vv_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint16m1_t vs2, vuint16m1_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i32.nxv4i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vwsll_vx_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint16m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vv_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint16m2_t vs2, vuint16m2_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i32.nxv8i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vwsll_vx_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint16m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vv_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint16m4_t vs2, vuint16m4_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u32m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv16i32.nxv16i16.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vwsll_vx_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint16m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint32mf2_t vs2, vuint32mf2_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m1_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv1i64.nxv1i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vwsll_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint32mf2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint32m1_t vs2, vuint32m1_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m2_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv2i64.nxv2i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vwsll_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint32m1_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint32m2_t vs2, vuint32m2_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m4_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv4i64.nxv4i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vwsll_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint32m2_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vv_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], [[VS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[VS2]], [[VS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint32m4_t vs2, vuint32m4_t vs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, vs1, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vwsll_vx_u64m8_mu +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[VS2:%.*]], i64 noundef [[RS1:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vwsll.mask.nxv8i64.nxv8i32.i64.i64( [[MASKEDOFF]], [[VS2]], i64 [[RS1]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vwsll_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint32m4_t vs2, size_t rs1, size_t vl) { + return __riscv_vwsll_mu(mask, maskedoff, vs2, rs1, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vaeskf1-out-of-range.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vaeskf1-out-of-range.c new file mode 100644 --- /dev/null +++ 
b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vaeskf1-out-of-range.c @@ -0,0 +1,16 @@ +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d \ +// RUN: -target-feature +v -target-feature +zfh -target-feature +experimental-zvfh \ +// RUN: -fsyntax-only -verify %s + +#include <riscv_vector.h> + +vuint32m1_t test_vaeskf1_vi_u32m1(vuint32m1_t vs2, size_t vl) { +// expected-error@+1 {{argument value 33 is outside the valid range [0, 31]}} + return __riscv_vaeskf1_vi_u32m1(vs2, 33, vl); +} + +vuint32m1_t test_vaeskf1_vi_u32m1_tu(vuint32m1_t merge, vuint32m1_t vs2, size_t vl) { +// expected-error@+1 {{argument value 33 is outside the valid range [0, 31]}} + return __riscv_vaeskf1_vi_u32m1_tu(merge, vs2, 33, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vaeskf2-out-of-range.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vaeskf2-out-of-range.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vaeskf2-out-of-range.c @@ -0,0 +1,16 @@ +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d \ +// RUN: -target-feature +v -target-feature +zfh -target-feature +experimental-zvfh \ +// RUN: -fsyntax-only -verify %s + +#include <riscv_vector.h> + +vuint32m1_t test_vaeskf2_vi_u32m1(vuint32m1_t vd, vuint32m1_t vs2, size_t vl) { +// expected-error@+1 {{argument value 33 is outside the valid range [0, 31]}} + return __riscv_vaeskf2_vi_u32m1(vd, vs2, 33, vl); +} + +vuint32m1_t test_vaeskf2_vi_u32m1_tu(vuint32m1_t merge, vuint32m1_t vs2, size_t vl) { +// expected-error@+1 {{argument value 33 is outside the valid range [0, 31]}} + return __riscv_vaeskf2_vi_u32m1_tu(merge, vs2, 33, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vsm3c-out-of-range.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vsm3c-out-of-range.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vsm3c-out-of-range.c @@ -0,0 +1,16 @@ +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d \ +// RUN: -target-feature +v -target-feature +zfh -target-feature +experimental-zvfh \ +// RUN: -fsyntax-only -verify %s + +#include <riscv_vector.h> + +vuint32m2_t test_vsm3c_vi_u32m2(vuint32m2_t vd, vuint32m2_t vs2, size_t vl) { +// expected-error@+1 {{argument value 33 is outside the valid range [0, 31]}} + return __riscv_vsm3c_vi_u32m2(vd, vs2, 33, vl); +} + +vuint32m2_t test_vsm3c_vi_u32m2_tu(vuint32m2_t merge, vuint32m2_t vs2, size_t vl) { +// expected-error@+1 {{argument value 33 is outside the valid range [0, 31]}} + return __riscv_vsm3c_vi_u32m2_tu(merge, vs2, 33, vl); +} diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vsm4k-out-of-range.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vsm4k-out-of-range.c new file mode 100644 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vsm4k-out-of-range.c @@ -0,0 +1,16 @@ +// REQUIRES: riscv-registered-target +// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d \ +// RUN: -target-feature +v -target-feature +zfh -target-feature +experimental-zvfh \ +// RUN: -fsyntax-only -verify %s + +#include <riscv_vector.h> + +vuint32m1_t test_vsm4k_vi_u32m1(vuint32m1_t vs2, size_t vl) { +// expected-error@+1 {{argument value 33 is outside the valid range [0, 31]}} + return __riscv_vsm4k_vi_u32m1(vs2, 33, vl); +} + +vuint32m1_t test_vsm4k_vi_u32m1_tu(vuint32m1_t merge, vuint32m1_t vs2, size_t vl) { +//
expected-error@+1 {{argument value 33 is outside the valid range [0, 31]}} + return __riscv_vsm4k_vi_u32m1_tu(merge, vs2, 33, vl); +} diff --git a/clang/test/Sema/zvk-invalid.c b/clang/test/Sema/zvk-invalid.c new file mode 100644 --- /dev/null +++ b/clang/test/Sema/zvk-invalid.c @@ -0,0 +1,23 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +v %s -fsyntax-only -verify + +#include <riscv_vector.h> + +void test_vaeskf1_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + __riscv_vaeskf1_vi_u32mf2_tu(vd, vs2, 0, vl); // expected-error {{RISC-V type 'vuint32mf2_t' (aka '__rvv_uint32mf2_t') requires the 'zvl256b' extension}} +} + +void test_vsm3c_tu(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + __riscv_vsm3c_vi_u32mf2_tu(vd, vs2, 0, vl); // expected-error {{RISC-V type 'vuint32mf2_t' (aka '__rvv_uint32mf2_t') requires the 'zvl512b' extension}} +} + +void test_vaeskf1(vuint32mf2_t vs2, size_t vl) { + __riscv_vaeskf1_vi_u32mf2(vs2, 0, vl); // expected-error {{RISC-V type 'vuint32mf2_t' (aka '__rvv_uint32mf2_t') requires the 'zvl256b' extension}} +} + +void test_vaesdf(vuint32mf2_t vd, vuint32mf2_t vs2, size_t vl) { + __riscv_vaesdf_vv_u32mf2(vd, vs2, vl); // expected-error {{RISC-V type 'vuint32mf2_t' (aka '__rvv_uint32mf2_t') requires the 'zvl256b' extension}} +} + +void test_vaesdf_vs(vuint32m2_t vd, vuint32mf2_t vs2, size_t vl) { + __riscv_vaesdf_vs_u32mf2_u32m2(vd, vs2, vl); // expected-error {{RISC-V type 'vuint32mf2_t' (aka '__rvv_uint32mf2_t') requires the 'zvl256b' extension}}
+}
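
The zvk-invalid.c diagnostics above reflect the minimum-VLEN check this patch adds: a fractional-LMUL operand such as vuint32mf2_t only spans a full element group when VLEN is large enough, which is why vaeskf1/vaesdf on u32mf2 demand zvl256b and vsm3c demands zvl512b (VLEN x 1/2 must reach the group width). As a minimal positive sketch, assuming a hypothetical RUN configuration that additionally passes -target-feature +zvl256b (not part of this patch), the same intrinsic would be accepted:

#include <riscv_vector.h>

// Hypothetical counterpart to test_vaeskf1 above: with zvl256b guaranteeing
// VLEN >= 256, the vuint32mf2_t operand is wide enough for the check to
// pass, so no diagnostic is expected here. The immediate 0 is also inside
// the valid [0, 31] range exercised by the out-of-range tests.
vuint32mf2_t test_vaeskf1_zvl256b(vuint32mf2_t vs2, size_t vl) {
  return __riscv_vaeskf1_vi_u32mf2(vs2, 0, vl);
}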