diff --git a/clang/include/clang/Basic/riscv_vector.td b/clang/include/clang/Basic/riscv_vector.td
--- a/clang/include/clang/Basic/riscv_vector.td
+++ b/clang/include/clang/Basic/riscv_vector.td
@@ -90,11 +90,21 @@
                           [["vv", "v", "vvv"],
                            ["vx", "v", "vve"]]>;
 
+multiclass RVVSignedBinBuiltinSetRoundingMode
+    : RVVOutOp1BuiltinSet<NAME, "csil",
+                          [["vv", "v", "vvvu"],
+                           ["vx", "v", "vveu"]]>;
+
 multiclass RVVUnsignedBinBuiltinSet
     : RVVOutOp1BuiltinSet<NAME, "csil",
                           [["vv", "Uv", "UvUvUv"],
                            ["vx", "Uv", "UvUvUe"]]>;
 
+multiclass RVVUnsignedBinBuiltinSetRoundingMode
+    : RVVOutOp1BuiltinSet<NAME, "csil",
+                          [["vv", "Uv", "UvUvUvu"],
+                           ["vx", "Uv", "UvUvUeu"]]>;
+
 multiclass RVVIntBinBuiltinSet
     : RVVSignedBinBuiltinSet,
       RVVUnsignedBinBuiltinSet;
@@ -1715,6 +1725,17 @@
 }
 
 // 13. Vector Fixed-Point Arithmetic Instructions
+let HeaderCode =
+[{
+enum __RISCV_VXRM {
+  __RISCV_VXRM_RNU = 0,
+  __RISCV_VXRM_RNE = 1,
+  __RISCV_VXRM_RDN = 2,
+  __RISCV_VXRM_ROD = 3,
+};
+}] in
+def vxrm_enum : RVVHeader;
+
 // 13.1. Vector Single-Width Saturating Add and Subtract
 let UnMaskedPolicyScheme = HasPassthruOperand in {
 defm vsaddu : RVVUnsignedBinBuiltinSet;
@@ -1723,10 +1744,46 @@
 defm vssub : RVVSignedBinBuiltinSet;
 
 // 13.2. Vector Single-Width Averaging Add and Subtract
-defm vaaddu : RVVUnsignedBinBuiltinSet;
-defm vaadd : RVVSignedBinBuiltinSet;
-defm vasubu : RVVUnsignedBinBuiltinSet;
-defm vasub : RVVSignedBinBuiltinSet;
+let ManualCodegen = [{
+  {
+    // LLVM intrinsic
+    // Unmasked: (passthru, op0, op1, round_mode, vl)
+    // Masked:   (passthru, vector_in, vector_in/scalar_in, mask, vxrm, vl, policy)
+
+    SmallVector<llvm::Value *, 7> Operands;
+    bool HasMaskedOff = !(
+        (IsMasked && (PolicyAttrs & RVV_VTA) && (PolicyAttrs & RVV_VMA)) ||
+        (!IsMasked && PolicyAttrs & RVV_VTA));
+    unsigned Offset = IsMasked ?
+        (HasMaskedOff ? 2 : 1) : (HasMaskedOff ? 1 : 0);
+
+    if (!HasMaskedOff)
+      Operands.push_back(llvm::PoisonValue::get(ResultType));
+    else
+      Operands.push_back(Ops[IsMasked ? 1 : 0]);
+
+    Operands.push_back(Ops[Offset]);     // op0
+    Operands.push_back(Ops[Offset + 1]); // op1
+
+    if (IsMasked)
+      Operands.push_back(Ops[0]); // mask
+
+    Operands.push_back(Ops[Offset + 2]); // vxrm
+    Operands.push_back(Ops[Offset + 3]); // vl
+
+    if (IsMasked)
+      Operands.push_back(ConstantInt::get(Ops.back()->getType(), PolicyAttrs));
+
+    IntrinsicTypes = {ResultType, Ops[Offset + 1]->getType(), Ops.back()->getType()};
+    llvm::Function *F = CGM.getIntrinsic(ID, IntrinsicTypes);
+    return Builder.CreateCall(F, Operands, "");
+  }
+}] in {
+  defm vaaddu : RVVUnsignedBinBuiltinSetRoundingMode;
+  defm vaadd : RVVSignedBinBuiltinSetRoundingMode;
+  defm vasubu : RVVUnsignedBinBuiltinSetRoundingMode;
+  defm vasub : RVVSignedBinBuiltinSetRoundingMode;
+}
 
 // 13.3. Vector Single-Width Fractional Multiply with Rounding and Saturation
 let RequiredFeatures = ["FullMultiply"] in {
diff --git a/clang/lib/Sema/SemaChecking.cpp b/clang/lib/Sema/SemaChecking.cpp
--- a/clang/lib/Sema/SemaChecking.cpp
+++ b/clang/lib/Sema/SemaChecking.cpp
@@ -4693,6 +4693,58 @@
   // Check if rnum is in [0, 10]
   case RISCV::BI__builtin_riscv_aes64ks1i_64:
     return SemaBuiltinConstantArgRange(TheCall, 1, 0, 10);
+  // Check if value range for vxrm is in [0, 3]
+  case RISCVVector::BI__builtin_rvv_vaaddu_vv_ta:
+  case RISCVVector::BI__builtin_rvv_vaaddu_vx_ta:
+  case RISCVVector::BI__builtin_rvv_vaadd_vv_ta:
+  case RISCVVector::BI__builtin_rvv_vaadd_vx_ta:
+  case RISCVVector::BI__builtin_rvv_vasubu_vv_ta:
+  case RISCVVector::BI__builtin_rvv_vasubu_vx_ta:
+  case RISCVVector::BI__builtin_rvv_vasub_vv_ta:
+  case RISCVVector::BI__builtin_rvv_vasub_vx_ta:
+    return SemaBuiltinConstantArgRange(TheCall, 2, 0, 3);
+  case RISCVVector::BI__builtin_rvv_vaaddu_vv_tu:
+  case RISCVVector::BI__builtin_rvv_vaaddu_vx_tu:
+  case RISCVVector::BI__builtin_rvv_vaadd_vv_tu:
+  case RISCVVector::BI__builtin_rvv_vaadd_vx_tu:
+  case RISCVVector::BI__builtin_rvv_vasubu_vv_tu:
+  case RISCVVector::BI__builtin_rvv_vasubu_vx_tu:
+  case RISCVVector::BI__builtin_rvv_vasub_vv_tu:
+  case RISCVVector::BI__builtin_rvv_vasub_vx_tu:
+  case RISCVVector::BI__builtin_rvv_vaaddu_vv_tama:
+  case RISCVVector::BI__builtin_rvv_vaaddu_vx_tama:
+  case RISCVVector::BI__builtin_rvv_vaadd_vv_tama:
+  case RISCVVector::BI__builtin_rvv_vaadd_vx_tama:
+  case RISCVVector::BI__builtin_rvv_vasubu_vv_tama:
+  case RISCVVector::BI__builtin_rvv_vasubu_vx_tama:
+  case RISCVVector::BI__builtin_rvv_vasub_vv_tama:
+  case RISCVVector::BI__builtin_rvv_vasub_vx_tama:
+    return SemaBuiltinConstantArgRange(TheCall, 3, 0, 3);
+  case RISCVVector::BI__builtin_rvv_vaaddu_vv_tum:
+  case RISCVVector::BI__builtin_rvv_vaaddu_vv_tumu:
+  case RISCVVector::BI__builtin_rvv_vaaddu_vv_mu:
+  case RISCVVector::BI__builtin_rvv_vaaddu_vx_tum:
+  case RISCVVector::BI__builtin_rvv_vaaddu_vx_tumu:
+  case RISCVVector::BI__builtin_rvv_vaaddu_vx_mu:
+  case RISCVVector::BI__builtin_rvv_vaadd_vv_tum:
+  case RISCVVector::BI__builtin_rvv_vaadd_vv_tumu:
+  case RISCVVector::BI__builtin_rvv_vaadd_vv_mu:
+  case RISCVVector::BI__builtin_rvv_vaadd_vx_tum:
+  case RISCVVector::BI__builtin_rvv_vaadd_vx_tumu:
+  case RISCVVector::BI__builtin_rvv_vaadd_vx_mu:
+  case RISCVVector::BI__builtin_rvv_vasubu_vv_tum:
+  case RISCVVector::BI__builtin_rvv_vasubu_vv_tumu:
+  case RISCVVector::BI__builtin_rvv_vasubu_vv_mu:
+  case RISCVVector::BI__builtin_rvv_vasubu_vx_tum:
+  case RISCVVector::BI__builtin_rvv_vasubu_vx_tumu:
+  case RISCVVector::BI__builtin_rvv_vasubu_vx_mu:
+  case RISCVVector::BI__builtin_rvv_vasub_vv_tum:
+  case RISCVVector::BI__builtin_rvv_vasub_vv_tumu:
+  case RISCVVector::BI__builtin_rvv_vasub_vv_mu:
+  case RISCVVector::BI__builtin_rvv_vasub_vx_tum:
+  case RISCVVector::BI__builtin_rvv_vasub_vx_tumu:
+  case RISCVVector::BI__builtin_rvv_vasub_vx_mu:
+    return SemaBuiltinConstantArgRange(TheCall, 4, 0, 3);
   case RISCV::BI__builtin_riscv_ntl_load:
   case RISCV::BI__builtin_riscv_ntl_store:
     DeclRefExpr *DRE =
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaadd.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaadd.c
--- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaadd.c
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaadd.c
@@ -9,880 +9,880 @@
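For reference, a minimal usage sketch of the updated intrinsic signatures exercised by the regenerated tests below. The wrapper function names and the build flags are illustrative, not part of the patch; it assumes a toolchain with this change applied and the V extension enabled (e.g. -march=rv64gcv). Each averaging intrinsic now takes an explicit vxrm argument between the operands and vl, and per the SemaChecking update above that argument must be a compile-time constant in [0, 3].

#include <riscv_vector.h>

// Unmasked averaging add, round-to-nearest-up (__RISCV_VXRM_RNU == 0).
vint32m1_t average_rnu(vint32m1_t op1, vint32m1_t op2, size_t vl) {
  return __riscv_vaadd_vv_i32m1(op1, op2, __RISCV_VXRM_RNU, vl);
}

// Vector-scalar averaging subtract, round-down (__RISCV_VXRM_RDN == 2).
vint32m1_t average_sub_rdn(vint32m1_t op1, int32_t op2, size_t vl) {
  return __riscv_vasub_vx_i32m1(op1, op2, __RISCV_VXRM_RDN, vl);
}

// Masked averaging add: the mask comes first, then the operands, the
// rounding mode, and vl; a non-constant or out-of-range rounding mode
// is rejected by the new Sema check.
vint32m1_t average_rnu_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2,
                         size_t vl) {
  return __riscv_vaadd_vv_i32m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl);
}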
// CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vv_i8mf8(vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf8(op1, op2, vl); + return __riscv_vaadd_vv_i8mf8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vx_i8mf8(vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf8(op1, op2, vl); + return __riscv_vaadd_vx_i8mf8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vv_i8mf4(vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf4(op1, op2, vl); + return __riscv_vaadd_vv_i8mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vx_i8mf4(vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf4(op1, op2, vl); + return __riscv_vaadd_vx_i8mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vv_i8mf2(vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf2(op1, op2, vl); + return __riscv_vaadd_vv_i8mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vaadd.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vx_i8mf2(vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf2(op1, op2, vl); + return __riscv_vaadd_vx_i8mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m1(op1, op2, vl); + return __riscv_vaadd_vv_i8m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m1(op1, op2, vl); + return __riscv_vaadd_vx_i8m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m2(op1, op2, vl); + return __riscv_vaadd_vv_i8m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m2(op1, op2, vl); + return __riscv_vaadd_vx_i8m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m4(op1, op2, vl); + return __riscv_vaadd_vv_i8m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m4 // 
CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m4(op1, op2, vl); + return __riscv_vaadd_vx_i8m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m8(op1, op2, vl); + return __riscv_vaadd_vv_i8m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m8(op1, op2, vl); + return __riscv_vaadd_vx_i8m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vv_i16mf4(vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vaadd_vv_i16mf4(op1, op2, vl); + return __riscv_vaadd_vv_i16mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vx_i16mf4(vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16mf4(op1, op2, vl); + return __riscv_vaadd_vx_i16mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.nxv2i16.i64( poison, 
[[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vv_i16mf2(vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i16mf2(op1, op2, vl); + return __riscv_vaadd_vv_i16mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vx_i16mf2(vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16mf2(op1, op2, vl); + return __riscv_vaadd_vx_i16mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m1(op1, op2, vl); + return __riscv_vaadd_vv_i16m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m1(op1, op2, vl); + return __riscv_vaadd_vx_i16m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m2(op1, op2, vl); + return __riscv_vaadd_vv_i16m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m2(op1, op2, vl); + return __riscv_vaadd_vx_i16m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define 
dso_local @test_vaadd_vv_i16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m4(op1, op2, vl); + return __riscv_vaadd_vv_i16m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m4(op1, op2, vl); + return __riscv_vaadd_vx_i16m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m8(op1, op2, vl); + return __riscv_vaadd_vv_i16m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m8(op1, op2, vl); + return __riscv_vaadd_vx_i16m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vv_i32mf2(vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i32mf2(op1, op2, vl); + return __riscv_vaadd_vv_i32mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vx_i32mf2(vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32mf2(op1, op2, vl); + return __riscv_vaadd_vx_i32mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m1(op1, op2, vl); + return __riscv_vaadd_vv_i32m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m1(op1, op2, vl); + return __riscv_vaadd_vx_i32m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m2(op1, op2, vl); + return __riscv_vaadd_vv_i32m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m2(op1, op2, vl); + return __riscv_vaadd_vx_i32m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m4(op1, op2, vl); + return __riscv_vaadd_vv_i32m4(op1, 
op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m4(op1, op2, vl); + return __riscv_vaadd_vx_i32m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m8(op1, op2, vl); + return __riscv_vaadd_vv_i32m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m8(op1, op2, vl); + return __riscv_vaadd_vx_i32m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m1(op1, op2, vl); + return __riscv_vaadd_vv_i64m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m1(op1, op2, vl); + return __riscv_vaadd_vx_i64m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], 
[[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m2(op1, op2, vl); + return __riscv_vaadd_vv_i64m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m2(op1, op2, vl); + return __riscv_vaadd_vx_i64m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m4(op1, op2, vl); + return __riscv_vaadd_vv_i64m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m4(op1, op2, vl); + return __riscv_vaadd_vx_i64m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m8(op1, op2, vl); + return __riscv_vaadd_vv_i64m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m8(op1, op2, vl); + return 
__riscv_vaadd_vx_i64m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vv_i8mf8_m(vbool64_t mask, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf8_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i8mf8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vx_i8mf8_m(vbool64_t mask, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf8_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i8mf8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vv_i8mf4_m(vbool32_t mask, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf4_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i8mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vx_i8mf4_m(vbool32_t mask, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf4_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i8mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vv_i8mf2_m(vbool16_t 
mask, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf2_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i8mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vx_i8mf2_m(vbool16_t mask, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf2_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i8mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m1_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i8m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m1_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i8m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m2_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i8m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 
[[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m2_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i8m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m4_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i8m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m4_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i8m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m8_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i8m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m8_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i8m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vv_i16mf4_m(vbool64_t mask, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vaadd_vv_i16mf4_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i16mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vx_i16mf4_m(vbool64_t mask, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16mf4_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i16mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vv_i16mf2_m(vbool32_t mask, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i16mf2_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i16mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vx_i16mf2_m(vbool32_t mask, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16mf2_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i16mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m1_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i16m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m1_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i16m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m2_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i16m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m2_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i16m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m4_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i16m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m4_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i16m4_m(mask, op1, op2, 
__RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m8_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i16m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m8_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i16m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vv_i32mf2_m(vbool64_t mask, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i32mf2_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i32mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vx_i32mf2_m(vbool64_t mask, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32mf2_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i32mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t 
test_vaadd_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m1_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i32m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m1_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i32m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m2_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i32m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m2_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i32m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m4_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i32m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m4_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i32m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m8_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i32m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m8_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i32m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m1_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i64m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m1_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i64m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m2_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i64m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m2_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i64m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m4_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i64m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m4_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i64m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m8_m(mask, op1, op2, vl); + return __riscv_vaadd_vv_i64m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vaadd_vx_i64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vx_i64m8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m8_m(mask, op1, op2, vl); + return __riscv_vaadd_vx_i64m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaaddu.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaaddu.c --- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaaddu.c +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vaaddu.c @@ -9,880 +9,880 @@ // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vv_u8mf8(vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf8(op1, op2, vl); + return __riscv_vaaddu_vv_u8mf8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vx_u8mf8(vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf8(op1, op2, vl); + return __riscv_vaaddu_vx_u8mf8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vv_u8mf4(vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf4(op1, op2, vl); + return __riscv_vaaddu_vv_u8mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t 
test_vaaddu_vx_u8mf4(vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf4(op1, op2, vl); + return __riscv_vaaddu_vx_u8mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vv_u8mf2(vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf2(op1, op2, vl); + return __riscv_vaaddu_vv_u8mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vx_u8mf2(vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf2(op1, op2, vl); + return __riscv_vaaddu_vx_u8mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m1(op1, op2, vl); + return __riscv_vaaddu_vv_u8m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m1(op1, op2, vl); + return __riscv_vaaddu_vx_u8m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m2(op1, op2, vl); + return __riscv_vaaddu_vv_u8m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m2(op1, op2, vl); + return __riscv_vaaddu_vx_u8m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m4(op1, op2, vl); + return __riscv_vaaddu_vv_u8m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m4(op1, op2, vl); + return __riscv_vaaddu_vx_u8m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m8(op1, op2, vl); + return __riscv_vaaddu_vv_u8m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m8(op1, op2, vl); + return __riscv_vaaddu_vx_u8m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] // vuint16mf4_t test_vaaddu_vv_u16mf4(vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16mf4(op1, op2, vl); + return __riscv_vaaddu_vv_u16mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vx_u16mf4(vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16mf4(op1, op2, vl); + return __riscv_vaaddu_vx_u16mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vv_u16mf2(vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16mf2(op1, op2, vl); + return __riscv_vaaddu_vv_u16mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vx_u16mf2(vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16mf2(op1, op2, vl); + return __riscv_vaaddu_vx_u16mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m1(op1, op2, vl); + return __riscv_vaaddu_vv_u16m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m1(op1, op2, vl); + return __riscv_vaaddu_vx_u16m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: 
define dso_local @test_vaaddu_vv_u16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m2(op1, op2, vl); + return __riscv_vaaddu_vv_u16m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m2(op1, op2, vl); + return __riscv_vaaddu_vx_u16m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m4(op1, op2, vl); + return __riscv_vaaddu_vv_u16m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m4(op1, op2, vl); + return __riscv_vaaddu_vx_u16m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m8(op1, op2, vl); + return __riscv_vaaddu_vv_u16m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.i16.i64( poison, [[OP1]], 
i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m8(op1, op2, vl); + return __riscv_vaaddu_vx_u16m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vv_u32mf2(vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32mf2(op1, op2, vl); + return __riscv_vaaddu_vv_u32mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vx_u32mf2(vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32mf2(op1, op2, vl); + return __riscv_vaaddu_vx_u32mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m1(op1, op2, vl); + return __riscv_vaaddu_vv_u32m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m1(op1, op2, vl); + return __riscv_vaaddu_vx_u32m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) 
{ - return __riscv_vaaddu_vv_u32m2(op1, op2, vl); + return __riscv_vaaddu_vv_u32m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m2(op1, op2, vl); + return __riscv_vaaddu_vx_u32m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m4(op1, op2, vl); + return __riscv_vaaddu_vv_u32m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m4(op1, op2, vl); + return __riscv_vaaddu_vx_u32m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m8(op1, op2, vl); + return __riscv_vaaddu_vv_u32m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m8(op1, op2, vl); + return __riscv_vaaddu_vx_u32m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m1(op1, op2, vl); + return __riscv_vaaddu_vv_u64m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m1(op1, op2, vl); + return __riscv_vaaddu_vx_u64m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m2(op1, op2, vl); + return __riscv_vaaddu_vv_u64m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m2(op1, op2, vl); + return __riscv_vaaddu_vx_u64m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m4(op1, op2, vl); + return __riscv_vaaddu_vv_u64m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m4(op1, op2, vl); + return __riscv_vaaddu_vx_u64m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m8(op1, op2, vl); + return __riscv_vaaddu_vv_u64m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m8(op1, op2, vl); + return __riscv_vaaddu_vx_u64m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vv_u8mf8_m(vbool64_t mask, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf8_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vx_u8mf8_m(vbool64_t mask, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf8_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vuint8mf4_t test_vaaddu_vv_u8mf4_m(vbool32_t mask, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf4_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vx_u8mf4_m(vbool32_t mask, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf4_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vv_u8mf2_m(vbool16_t mask, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vx_u8mf2_m(vbool16_t mask, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m1_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u8m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m1_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u8m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u8m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u8m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m4_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u8m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m4_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u8m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m8_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u8m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m8_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u8m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vv_u16mf4_m(vbool64_t mask, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16mf4_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u16mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vx_u16mf4_m(vbool64_t mask, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16mf4_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u16mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vv_u16mf2_m(vbool32_t mask, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16mf2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u16mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); 
} // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vx_u16mf2_m(vbool32_t mask, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16mf2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u16mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vv_u16m1_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m1_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u16m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vx_u16m1_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m1_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u16m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u16m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vuint16m2_t test_vaaddu_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u16m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m4_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u16m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m4_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u16m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vv_u16m8_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m8_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u16m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m8_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u16m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 
[[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vv_u32mf2_m(vbool64_t mask, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32mf2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u32mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vx_u32mf2_m(vbool64_t mask, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32mf2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u32mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m1_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u32m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m1_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u32m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u32m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u32m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m4_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u32m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m4_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u32m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m8_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u32m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, size_t vl) { - return 
__riscv_vaaddu_vx_u32m8_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u32m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m1_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u64m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m1_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u64m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u64m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m2_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u64m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], 
[[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m4_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u64m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m4_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u64m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m8_m(mask, op1, op2, vl); + return __riscv_vaaddu_vv_u64m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vx_u64m8_m(vbool8_t mask, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m8_m(mask, op1, op2, vl); + return __riscv_vaaddu_vx_u64m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vasub.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vasub.c --- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vasub.c +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vasub.c @@ -9,880 +9,880 @@ // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vv_i8mf8(vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf8(op1, op2, vl); + return 
__riscv_vasub_vv_i8mf8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vx_i8mf8(vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf8(op1, op2, vl); + return __riscv_vasub_vx_i8mf8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vv_i8mf4(vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf4(op1, op2, vl); + return __riscv_vasub_vv_i8mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vx_i8mf4(vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf4(op1, op2, vl); + return __riscv_vasub_vx_i8mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vv_i8mf2(vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf2(op1, op2, vl); + return __riscv_vasub_vv_i8mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vx_i8mf2(vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf2(op1, op2, vl); + return __riscv_vasub_vx_i8mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 
[[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vasub_vv_i8m1(op1, op2, vl); + return __riscv_vasub_vv_i8m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m1(op1, op2, vl); + return __riscv_vasub_vx_i8m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vasub_vv_i8m2(op1, op2, vl); + return __riscv_vasub_vv_i8m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m2(op1, op2, vl); + return __riscv_vasub_vx_i8m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vasub_vv_i8m4(op1, op2, vl); + return __riscv_vasub_vv_i8m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m4(op1, op2, vl); + return __riscv_vasub_vx_i8m4(op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vasub_vv_i8m8(op1, op2, vl); + return __riscv_vasub_vv_i8m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m8(op1, op2, vl); + return __riscv_vasub_vx_i8m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vv_i16mf4(vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vasub_vv_i16mf4(op1, op2, vl); + return __riscv_vasub_vv_i16mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vx_i16mf4(vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16mf4(op1, op2, vl); + return __riscv_vasub_vx_i16mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vv_i16mf2(vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i16mf2(op1, op2, vl); + return __riscv_vasub_vv_i16mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vx_i16mf2(vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16mf2(op1, op2, vl); + return __riscv_vasub_vx_i16mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vasub_vv_i16m1(op1, op2, vl); + return __riscv_vasub_vv_i16m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m1(op1, op2, vl); + return __riscv_vasub_vx_i16m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vasub_vv_i16m2(op1, op2, vl); + return __riscv_vasub_vv_i16m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m2(op1, op2, vl); + return __riscv_vasub_vx_i16m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vasub_vv_i16m4(op1, op2, vl); + return 
__riscv_vasub_vv_i16m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m4(op1, op2, vl); + return __riscv_vasub_vx_i16m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vasub_vv_i16m8(op1, op2, vl); + return __riscv_vasub_vv_i16m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m8(op1, op2, vl); + return __riscv_vasub_vx_i16m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vv_i32mf2(vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i32mf2(op1, op2, vl); + return __riscv_vasub_vv_i32mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vx_i32mf2(vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32mf2(op1, op2, vl); + return __riscv_vasub_vx_i32mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vasub_vv_i32m1(op1, op2, vl); + return __riscv_vasub_vv_i32m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m1(op1, op2, vl); + return __riscv_vasub_vx_i32m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vasub_vv_i32m2(op1, op2, vl); + return __riscv_vasub_vv_i32m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m2(op1, op2, vl); + return __riscv_vasub_vx_i32m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vasub_vv_i32m4(op1, op2, vl); + return __riscv_vasub_vv_i32m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vx_i32m4(vint32m4_t op1, int32_t op2, 
size_t vl) { - return __riscv_vasub_vx_i32m4(op1, op2, vl); + return __riscv_vasub_vx_i32m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vasub_vv_i32m8(op1, op2, vl); + return __riscv_vasub_vv_i32m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m8(op1, op2, vl); + return __riscv_vasub_vx_i32m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vasub_vv_i64m1(op1, op2, vl); + return __riscv_vasub_vv_i64m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m1(op1, op2, vl); + return __riscv_vasub_vx_i64m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vasub_vv_i64m2(op1, op2, vl); + return __riscv_vasub_vv_i64m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m2(op1, op2, vl); + return __riscv_vasub_vx_i64m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vasub_vv_i64m4(op1, op2, vl); + return __riscv_vasub_vv_i64m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m4(op1, op2, vl); + return __riscv_vasub_vx_i64m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vasub_vv_i64m8(op1, op2, vl); + return __riscv_vasub_vv_i64m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m8(op1, op2, vl); + return __riscv_vasub_vx_i64m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vint8mf8_t test_vasub_vv_i8mf8_m(vbool64_t mask, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf8_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i8mf8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vx_i8mf8_m(vbool64_t mask, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf8_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i8mf8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vv_i8mf4_m(vbool32_t mask, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf4_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i8mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vx_i8mf4_m(vbool32_t mask, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf4_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i8mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vv_i8mf2_m(vbool16_t mask, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf2_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i8mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasub.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vx_i8mf2_m(vbool16_t mask, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf2_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i8mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vasub_vv_i8m1_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i8m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m1_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i8m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vasub_vv_i8m2_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i8m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m2_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i8m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( 
poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vasub_vv_i8m4_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i8m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m4_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i8m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vasub_vv_i8m8_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i8m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m8_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i8m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vv_i16mf4_m(vbool64_t mask, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vasub_vv_i16mf4_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i16mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vx_i16mf4_m(vbool64_t mask, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16mf4_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i16mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vv_i16mf2_m(vbool32_t mask, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i16mf2_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i16mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vx_i16mf2_m(vbool32_t mask, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16mf2_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i16mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vasub_vv_i16m1_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i16m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m1_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i16m1_m(mask, op1, 
op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vasub_vv_i16m2_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i16m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m2_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i16m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vasub_vv_i16m4_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i16m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m4_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i16m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t 
test_vasub_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vasub_vv_i16m8_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i16m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m8_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i16m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vv_i32mf2_m(vbool64_t mask, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i32mf2_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i32mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vx_i32mf2_m(vbool64_t mask, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32mf2_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i32mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vasub_vv_i32m1_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i32m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m1_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i32m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vasub_vv_i32m2_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i32m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m2_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i32m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vasub_vv_i32m4_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i32m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m4_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i32m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: 
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vasub_vv_i32m8_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i32m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m8_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i32m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vasub_vv_i64m1_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i64m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m1_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i64m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vasub_vv_i64m2_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i64m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vasub_vx_i64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m2_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i64m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vasub_vv_i64m4_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i64m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m4_m(mask, op1, op2, vl); + return __riscv_vasub_vx_i64m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vasub_vv_i64m8_m(mask, op1, op2, vl); + return __riscv_vasub_vv_i64m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vx_i64m8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, size_t vl) { - return 
__riscv_vasub_vx_i64m8_m(mask, op1, op2, vl);
+ return __riscv_vasub_vx_i64m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl);
}
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vasubu.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vasubu.c
--- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vasubu.c
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/non-overloaded/vasubu.c
@@ -9,880 +9,880 @@
// CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf8
// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vuint8mf8_t test_vasubu_vv_u8mf8(vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) {
- return __riscv_vasubu_vv_u8mf8(op1, op2, vl);
+ return __riscv_vasubu_vv_u8mf8(op1, op2, __RISCV_VXRM_RNU, vl);
}
// CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf8
// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vuint8mf8_t test_vasubu_vx_u8mf8(vuint8mf8_t op1, uint8_t op2, size_t vl) {
- return __riscv_vasubu_vx_u8mf8(op1, op2, vl);
+ return __riscv_vasubu_vx_u8mf8(op1, op2, __RISCV_VXRM_RNU, vl);
}
// CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf4
// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vuint8mf4_t test_vasubu_vv_u8mf4(vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) {
- return __riscv_vasubu_vv_u8mf4(op1, op2, vl);
+ return __riscv_vasubu_vv_u8mf4(op1, op2, __RISCV_VXRM_RNU, vl);
}
// CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf4
// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vuint8mf4_t test_vasubu_vx_u8mf4(vuint8mf4_t op1, uint8_t op2, size_t vl) {
- return __riscv_vasubu_vx_u8mf4(op1, op2, vl);
+ return __riscv_vasubu_vx_u8mf4(op1, op2, __RISCV_VXRM_RNU, vl);
}
// CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf2
// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]])
//
CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vv_u8mf2(vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u8mf2(op1, op2, vl); + return __riscv_vasubu_vv_u8mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vx_u8mf2(vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8mf2(op1, op2, vl); + return __riscv_vasubu_vx_u8mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m1(op1, op2, vl); + return __riscv_vasubu_vv_u8m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m1(op1, op2, vl); + return __riscv_vasubu_vx_u8m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m2(op1, op2, vl); + return __riscv_vasubu_vv_u8m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m2(op1, op2, vl); + return __riscv_vasubu_vx_u8m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m4 // CHECK-RV64-SAME: ( 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m4(op1, op2, vl); + return __riscv_vasubu_vv_u8m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m4(op1, op2, vl); + return __riscv_vasubu_vx_u8m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m8(op1, op2, vl); + return __riscv_vasubu_vv_u8m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m8(op1, op2, vl); + return __riscv_vasubu_vx_u8m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vv_u16mf4(vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vasubu_vv_u16mf4(op1, op2, vl); + return __riscv_vasubu_vv_u16mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasubu.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vx_u16mf4(vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16mf4(op1, op2, vl); + return __riscv_vasubu_vx_u16mf4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vv_u16mf2(vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u16mf2(op1, op2, vl); + return __riscv_vasubu_vv_u16mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vx_u16mf2(vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16mf2(op1, op2, vl); + return __riscv_vasubu_vx_u16mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m1(op1, op2, vl); + return __riscv_vasubu_vv_u16m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m1(op1, op2, vl); + return __riscv_vasubu_vx_u16m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m2(op1, op2, vl); + return 
__riscv_vasubu_vv_u16m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m2(op1, op2, vl); + return __riscv_vasubu_vx_u16m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m4(op1, op2, vl); + return __riscv_vasubu_vv_u16m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m4(op1, op2, vl); + return __riscv_vasubu_vx_u16m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m8(op1, op2, vl); + return __riscv_vasubu_vv_u16m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m8(op1, op2, vl); + return __riscv_vasubu_vx_u16m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vv_u32mf2(vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u32mf2(op1, op2, vl); + return __riscv_vasubu_vv_u32mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vx_u32mf2(vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32mf2(op1, op2, vl); + return __riscv_vasubu_vx_u32mf2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m1(op1, op2, vl); + return __riscv_vasubu_vv_u32m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m1(op1, op2, vl); + return __riscv_vasubu_vx_u32m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m2(op1, op2, vl); + return __riscv_vasubu_vv_u32m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m2(op1, op2, vl); + return __riscv_vasubu_vx_u32m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m4(op1, op2, vl); + return __riscv_vasubu_vv_u32m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m4(op1, op2, vl); + return __riscv_vasubu_vx_u32m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m8(op1, op2, vl); + return __riscv_vasubu_vv_u32m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m8(op1, op2, vl); + return __riscv_vasubu_vx_u32m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m1(op1, op2, vl); + return __riscv_vasubu_vv_u64m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vasubu_vx_u64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m1(op1, op2, vl); + return __riscv_vasubu_vx_u64m1(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m2(op1, op2, vl); + return __riscv_vasubu_vv_u64m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m2(op1, op2, vl); + return __riscv_vasubu_vx_u64m2(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m4(op1, op2, vl); + return __riscv_vasubu_vv_u64m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m4(op1, op2, vl); + return __riscv_vasubu_vx_u64m4(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m8(op1, op2, vl); + return __riscv_vasubu_vv_u64m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m8(op1, op2, vl); + return __riscv_vasubu_vx_u64m8(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vv_u8mf8_m(vbool64_t mask, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vasubu_vv_u8mf8_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u8mf8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vx_u8mf8_m(vbool64_t mask, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8mf8_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u8mf8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vv_u8mf4_m(vbool32_t mask, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vasubu_vv_u8mf4_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u8mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 
[[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vx_u8mf4_m(vbool32_t mask, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8mf4_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u8mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vv_u8mf2_m(vbool16_t mask, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u8mf2_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u8mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vx_u8mf2_m(vbool16_t mask, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8mf2_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u8mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m1_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u8m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m1_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u8m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m2_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u8m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m2_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u8m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m4_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u8m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m4_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u8m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m8_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u8m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m8_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u8m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vv_u16mf4_m(vbool64_t mask, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vasubu_vv_u16mf4_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u16mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vx_u16mf4_m(vbool64_t mask, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16mf4_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u16mf4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vv_u16mf2_m(vbool32_t mask, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u16mf2_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u16mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vuint16mf2_t test_vasubu_vx_u16mf2_m(vbool32_t mask, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16mf2_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u16mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vv_u16m1_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m1_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u16m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vx_u16m1_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m1_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u16m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m2_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u16m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m2_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u16m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 
[[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m4_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u16m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m4_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u16m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vv_u16m8_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m8_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u16m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m8_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u16m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vv_u32mf2_m(vbool64_t mask, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u32mf2_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u32mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vx_u32mf2_m(vbool64_t mask, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32mf2_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u32mf2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m1_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u32m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m1_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u32m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m2_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u32m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2, size_t vl) { - return 
__riscv_vasubu_vx_u32m2_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u32m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m4_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u32m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m4_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u32m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m8_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u32m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m8_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u32m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( poison, 
[[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m1_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u64m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m1_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u64m1_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m2_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u64m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m2_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u64m2_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m4_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u64m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasubu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m4_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u64m4_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m8_m(mask, op1, op2, vl); + return __riscv_vasubu_vv_u64m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vx_u64m8_m(vbool8_t mask, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m8_m(mask, op1, op2, vl); + return __riscv_vasubu_vx_u64m8_m(mask, op1, op2, __RISCV_VXRM_RNU, vl); } diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaadd.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaadd.c --- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaadd.c +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaadd.c @@ -9,880 +9,880 @@ // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vv_i8mf8(vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vx_i8mf8(vint8mf8_t op1, int8_t op2, size_t vl) { - 
return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vv_i8mf4(vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vx_i8mf4(vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vv_i8mf2(vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vx_i8mf2(vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaadd.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { 
// CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vv_i16mf4(vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vx_i16mf4(vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vv_i16mf2(vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vx_i16mf2(vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { - return 
__riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vx_i16m1(vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vv_i32mf2(vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vx_i32mf2(vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vaadd_vv_i32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd(op1, op2, vl); + return __riscv_vaadd(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vv_i8mf8_m(vbool64_t mask, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vx_i8mf8_m(vbool64_t mask, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], 
i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vv_i8mf4_m(vbool32_t mask, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vx_i8mf4_m(vbool32_t mask, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vv_i8mf2_m(vbool16_t mask, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vx_i8mf2_m(vbool16_t mask, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], 
[[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], 
[[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vv_i16mf4_m(vbool64_t mask, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vx_i16mf4_m(vbool64_t mask, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vv_i16mf2_m(vbool32_t mask, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaadd.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vx_i16mf2_m(vbool32_t mask, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) 
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vv_i32mf2_m(vbool64_t mask, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaadd.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vx_i32mf2_m(vbool64_t mask, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: 
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vx_i64m8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd(mask, op1, op2, vl); + return __riscv_vaadd(mask, op1, op2, __RISCV_VXRM_RNU, vl); } diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaaddu.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaaddu.c --- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaaddu.c +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vaaddu.c @@ -9,880 +9,880 @@ // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vv_u8mf8(vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vx_u8mf8(vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vv_u8mf4(vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vaaddu(op1, 
op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vx_u8mf4(vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vv_u8mf2(vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vx_u8mf2(vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf4 // 
CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vv_u16mf4(vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vx_u16mf4(vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vv_u16mf2(vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vx_u16mf2(vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.i16.i64( poison, [[OP1]], i16 
[[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vv_u32mf2(vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vx_u32mf2(vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vuint32m2_t test_vaaddu_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { - 
return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu(op1, op2, vl); + return __riscv_vaaddu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vv_u8mf8_m(vbool64_t mask, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vx_u8mf8_m(vbool64_t mask, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vv_u8mf4_m(vbool32_t mask, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: 
define dso_local @test_vaaddu_vx_u8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vx_u8mf4_m(vbool32_t mask, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vv_u8mf2_m(vbool16_t mask, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vx_u8mf2_m(vbool16_t mask, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return 
__riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return 
__riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vv_u16mf4_m(vbool64_t mask, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vx_u16mf4_m(vbool64_t mask, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vv_u16mf2_m(vbool32_t mask, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] 
// vuint16mf2_t test_vaaddu_vx_u16mf2_m(vbool32_t mask, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vv_u16m1_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vx_u16m1_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( poison, 
[[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vv_u16m8_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vv_u32mf2_m(vbool64_t mask, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], 
[[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vx_u32mf2_m(vbool64_t mask, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vaaddu_vv_u64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vx_u64m8_m(vbool8_t mask, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu(mask, op1, op2, vl); + return __riscv_vaaddu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vasub.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vasub.c --- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vasub.c +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vasub.c @@ -9,880 +9,880 @@ // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vv_i8mf8(vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vx_i8mf8(vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vv_i8mf4(vint8mf4_t op1, vint8mf4_t 
op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vx_i8mf4(vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vv_i8mf2(vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vx_i8mf2(vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vv_i8m1(vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vx_i8m1(vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vv_i8m2(vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vx_i8m2(vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vv_i8m4(vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vx_i8m4(vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vv_i8m8(vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vx_i8m8(vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vv_i16mf4(vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vx_i16mf4(vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vv_i16mf2(vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vx_i16mf2(vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vv_i16m1(vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vx_i16m1(vint16m1_t 
op1, int16_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vv_i16m2(vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vx_i16m2(vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vv_i16m4(vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vx_i16m4(vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vv_i16m8(vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.i16.i64( 
poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vx_i16m8(vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vv_i32mf2(vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vx_i32mf2(vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vv_i32m2(vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define 
dso_local @test_vasub_vx_i32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vx_i32m2(vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vv_i32m4(vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vx_i32m4(vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vv_i32m8(vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vx_i32m8(vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) 
// CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vv_i64m1(vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vx_i64m1(vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vv_i64m2(vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vx_i64m2(vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vv_i64m4(vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vx_i64m4(vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vasub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vv_i64m8(vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vx_i64m8(vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vasub(op1, op2, vl); + return __riscv_vasub(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vv_i8mf8_m(vbool64_t mask, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vx_i8mf8_m(vbool64_t mask, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vv_i8mf4_m(vbool32_t mask, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vx_i8mf4_m(vbool32_t mask, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vv_i8mf2_m(vbool16_t mask, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vx_i8mf2_m(vbool16_t mask, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vv_i8m1_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vx_i8m1_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vv_i8m2_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vx_i8m2_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vv_i8m4_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vx_i8m4_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vv_i8m8_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vx_i8m8_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vv_i16mf4_m(vbool64_t mask, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vx_i16mf4_m(vbool64_t mask, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vv_i16mf2_m(vbool32_t mask, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vx_i16mf2_m(vbool32_t mask, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( poison, 
[[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vv_i16m1_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vx_i16m1_m(vbool16_t mask, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vv_i16m2_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vx_i16m2_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vv_i16m4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vx_i16m4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vv_i16m8_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vx_i16m8_m(vbool2_t mask, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vv_i32mf2_m(vbool64_t mask, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vx_i32mf2_m(vbool64_t mask, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] 
{ // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vv_i32m2_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vx_i32m2_m(vbool16_t mask, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vv_i32m4_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 
noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vx_i32m4_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vv_i32m8_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vx_i32m8_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vv_i64m1_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vx_i64m1_m(vbool64_t mask, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m2_m 
// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vv_i64m2_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vx_i64m2_m(vbool32_t mask, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vv_i64m4_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vx_i64m4_m(vbool16_t mask, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vv_i64m8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vasub(mask, op1, op2, vl); + return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vasub_vx_i64m8_m
// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3)
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3)
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vint64m8_t test_vasub_vx_i64m8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, size_t vl) {
- return __riscv_vasub(mask, op1, op2, vl);
+ return __riscv_vasub(mask, op1, op2, __RISCV_VXRM_RNU, vl);
}
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vasubu.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vasubu.c
--- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vasubu.c
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/non-policy/overloaded/vasubu.c
@@ -9,880 +9,880 @@
// CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf8
// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vuint8mf8_t test_vasubu_vv_u8mf8(vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) {
- return __riscv_vasubu(op1, op2, vl);
+ return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl);
}
// CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf8
// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vuint8mf8_t test_vasubu_vx_u8mf8(vuint8mf8_t op1, uint8_t op2, size_t vl) {
- return __riscv_vasubu(op1, op2, vl);
+ return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl);
}
// CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf4
// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vuint8mf4_t test_vasubu_vv_u8mf4(vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) {
- return __riscv_vasubu(op1, op2, vl);
+ return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl);
}
// CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf4
// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]])
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]])
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vuint8mf4_t test_vasubu_vx_u8mf4(vuint8mf4_t op1, uint8_t op2, size_t vl) {
- return __riscv_vasubu(op1, op2, vl);
+
return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vv_u8mf2(vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vx_u8mf2(vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vv_u8m1(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vx_u8m1(vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vv_u8m2(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vasubu.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vx_u8m2(vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vv_u8m4(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vx_u8m4(vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vv_u8m8(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vx_u8m8(vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vv_u16mf4(vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf4 // CHECK-RV64-SAME: ( [[OP1:%.*]], 
i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vx_u16mf4(vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vv_u16mf2(vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vx_u16mf2(vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vv_u16m1(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vx_u16m1(vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vv_u16m2(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vx_u16m2(vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vv_u16m4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vx_u16m4(vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vv_u16m8(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vx_u16m8(vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vv_u32mf2(vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32mf2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vx_u32mf2(vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vv_u32m2(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vuint32m2_t test_vasubu_vx_u32m2(vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vv_u32m4(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vx_u32m4(vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vv_u32m8(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vx_u32m8(vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vv_u64m1(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m1 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: 
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vx_u64m1(vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vv_u64m2(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m2 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vx_u64m2(vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vv_u64m4(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m4 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vx_u64m4(vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vv_u64m8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return 
__riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m8 // CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vx_u64m8(vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu(op1, op2, vl); + return __riscv_vasubu(op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vv_u8mf8_m(vbool64_t mask, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vx_u8mf8_m(vbool64_t mask, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vv_u8mf4_m(vbool32_t mask, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vx_u8mf4_m(vbool32_t mask, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, 
vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vv_u8mf2_m(vbool16_t mask, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vx_u8mf2_m(vbool16_t mask, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vv_u8m1_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vx_u8m1_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vv_u8m2_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, size_t 
vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vx_u8m2_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vv_u8m4_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vx_u8m4_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vv_u8m8_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t 
test_vasubu_vx_u8m8_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vv_u16mf4_m(vbool64_t mask, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vx_u16mf4_m(vbool64_t mask, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vv_u16mf2_m(vbool32_t mask, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vx_u16mf2_m(vbool32_t mask, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( poison, 
[[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vv_u16m1_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vx_u16m1_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vv_u16m2_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vx_u16m2_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vv_u16m4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 
[[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vx_u16m4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vv_u16m8_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vx_u16m8_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vv_u32mf2_m(vbool64_t mask, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32mf2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vx_u32mf2_m(vbool64_t mask, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vv_u32m2_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vx_u32m2_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vv_u32m4_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef 
signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vx_u32m4_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vv_u32m8_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vx_u32m8_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vv_u64m1_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m1_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vx_u64m1_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: 
define dso_local @test_vasubu_vv_u64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vv_u64m2_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m2_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vx_u64m2_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vv_u64m4_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m4_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vx_u64m4_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vv_u64m8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return 
__riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m8_m // CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 3) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 3) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vx_u64m8_m(vbool8_t mask, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu(mask, op1, op2, vl); + return __riscv_vasubu(mask, op1, op2, __RISCV_VXRM_RNU, vl); } diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaadd.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaadd.c --- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaadd.c +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaadd.c @@ -9,1760 +9,1760 @@ // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vv_i8mf8_tu(vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf8_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8mf8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vx_i8mf8_tu(vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf8_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8mf8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vv_i8mf4_tu(vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vx_i8mf4_tu(vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vv_i8mf2_tu(vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vx_i8mf2_tu(vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vv_i8m1_tu(vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vx_i8m1_tu(vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m2_tu // CHECK-RV64-SAME: ( 
[[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vv_i8m2_tu(vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vx_i8m2_tu(vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vv_i8m4_tu(vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vx_i8m4_tu(vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vv_i8m8_tu(vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vx_i8m8_tu(vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vv_i16mf4_tu(vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vaadd_vv_i16mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vx_i16mf4_tu(vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vv_i16mf2_tu(vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i16mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vx_i16mf2_tu(vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, 
size_t vl) { - return __riscv_vaadd_vx_i16mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vv_i16m1_tu(vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vx_i16m1_tu(vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vv_i16m2_tu(vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vx_i16m2_tu(vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vv_i16m4_tu(vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vx_i16m4_tu(vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vv_i16m8_tu(vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vx_i16m8_tu(vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vv_i32mf2_tu(vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i32mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.i32.i64( 
[[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vx_i32mf2_tu(vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vv_i32m1_tu(vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vx_i32m1_tu(vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vv_i32m2_tu(vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vx_i32m2_tu(vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vv_i32m4_tu(vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vx_i32m4_tu(vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vv_i32m8_tu(vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vx_i32m8_tu(vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vv_i64m1_tu(vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vx_i64m1_tu(vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vv_i64m2_tu(vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vx_i64m2_tu(vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vv_i64m4_tu(vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vx_i64m4_tu(vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return 
__riscv_vaadd_vx_i64m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vv_i64m8_tu(vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vx_i64m8_tu(vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vv_i8mf8_tum(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8mf8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vx_i8mf8_tum(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8mf8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], 
[[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vv_i8mf4_tum(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vx_i8mf4_tum(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vv_i8mf2_tum(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vx_i8mf2_tum(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t 
test_vaadd_vv_i8m1_tum(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vx_i8m1_tum(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vv_i8m2_tum(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vx_i8m2_tum(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vv_i8m4_tum(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define 
dso_local @test_vaadd_vx_i8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vx_i8m4_tum(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vv_i8m8_tum(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vx_i8m8_tum(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vv_i16mf4_tum(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vaadd_vv_i16mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaadd.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vx_i16mf4_tum(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vv_i16mf2_tum(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i16mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vx_i16mf2_tum(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vv_i16m1_tum(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], 
[[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vx_i16m1_tum(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vv_i16m2_tum(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vx_i16m2_tum(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vv_i16m4_tum(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vx_i16m4_tum(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m4_tum(mask, maskedoff, 
op1, op2, vl); + return __riscv_vaadd_vx_i16m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vv_i16m8_tum(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vx_i16m8_tum(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vv_i32mf2_tum(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i32mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vx_i32mf2_tum(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vv_i32m1_tum(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vx_i32m1_tum(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vv_i32m2_tum(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vx_i32m2_tum(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vv_i32m4_tum(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vx_i32m4_tum(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vv_i32m8_tum(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vx_i32m8_tum(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vv_i64m1_tum(vbool64_t mask, 
vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vx_i64m1_tum(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vv_i64m2_tum(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vx_i64m2_tum(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vv_i64m4_tum(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vaadd_vx_i64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vx_i64m4_tum(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vv_i64m8_tum(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vx_i64m8_tum(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vv_i8mf8_tumu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8mf8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaadd.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vx_i8mf8_tumu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8mf8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vv_i8mf4_tumu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vx_i8mf4_tumu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vv_i8mf2_tumu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 
0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vx_i8mf2_tumu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vv_i8m1_tumu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vx_i8m1_tumu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vv_i8m2_tumu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vx_i8m2_tumu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m2_tumu(mask, maskedoff, 
op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vv_i8m4_tumu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vx_i8m4_tumu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vv_i8m8_tumu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vx_i8m8_tumu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vv_i16mf4_tumu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vaadd_vv_i16mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vx_i16mf4_tumu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vv_i16mf2_tumu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i16mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vx_i16mf2_tumu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vv_i16m1_tumu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vx_i16m1_tumu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vv_i16m2_tumu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vx_i16m2_tumu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vv_i16m4_tumu(vbool4_t mask, vint16m4_t maskedoff, 
vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vx_i16m4_tumu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vv_i16m8_tumu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vx_i16m8_tumu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vv_i32mf2_tumu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i32mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vx_i32mf2_tumu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vv_i32m1_tumu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vx_i32m1_tumu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vv_i32m2_tumu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vx_i32m2_tumu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vv_i32m4_tumu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vx_i32m4_tumu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vv_i32m8_tumu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaadd.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vx_i32m8_tumu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vv_i64m1_tumu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vx_i64m1_tumu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vv_i64m2_tumu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vx_i64m2_tumu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, 
int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vv_i64m4_tumu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vx_i64m4_tumu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vv_i64m8_tumu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vx_i64m8_tumu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf8_mu 
// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vv_i8mf8_mu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8mf8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vx_i8mf8_mu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8mf8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vv_i8mf4_mu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vx_i8mf4_mu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], 
i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vv_i8mf2_mu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i8mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vx_i8mf2_mu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vv_i8m1_mu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vx_i8m1_mu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vv_i8m2_mu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, 
size_t vl) { - return __riscv_vaadd_vv_i8m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vx_i8m2_mu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vv_i8m4_mu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vx_i8m4_mu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vv_i8m8_mu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i8m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i8m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef 
signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vx_i8m8_mu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_vx_i8m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i8m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vv_i16mf4_mu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vaadd_vv_i16mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vx_i16mf4_mu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vv_i16mf2_mu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i16mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vx_i16mf2_mu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vv_i16m1_mu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vx_i16m1_mu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vv_i16m2_mu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vx_i16m2_mu(vbool8_t mask, vint16m2_t 
maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vv_i16m4_mu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vx_i16m4_mu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vv_i16m8_mu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i16m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i16m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vx_i16m8_mu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_vx_i16m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i16m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vaadd_vv_i32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vv_i32mf2_mu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vaadd_vv_i32mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vx_i32mf2_mu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vv_i32m1_mu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vx_i32m1_mu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vv_i32m2_mu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vx_i32m2_mu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vv_i32m4_mu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vx_i32m4_mu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 
1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vv_i32m8_mu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i32m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i32m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vx_i32m8_mu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_vx_i32m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i32m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vv_i64m1_mu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vx_i64m1_mu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vv_i64m2_mu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m2_mu(mask, 
maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vx_i64m2_mu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vv_i64m4_mu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vx_i64m4_mu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vv_i64m8_mu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vaadd_vv_i64m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vv_i64m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vx_i64m8_mu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_vx_i64m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_vx_i64m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaaddu.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaaddu.c --- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaaddu.c +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vaaddu.c @@ -9,1760 +9,1760 @@ // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vv_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf8_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vx_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf8_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vv_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vx_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vv_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vx_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vv_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vx_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: 
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vv_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vx_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vv_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vx_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vv_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m8_tu // 
CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vx_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vv_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vx_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vv_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vx_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - 
return __riscv_vaaddu_vx_u16mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vv_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vx_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vv_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vx_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], 
[[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vv_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vx_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vv_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vx_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vx_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vx_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vv_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vx_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: 
define dso_local @test_vaaddu_vv_u32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vv_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vx_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vv_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vx_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t 
op2, size_t vl) { - return __riscv_vaaddu_vv_u64m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m1_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m2_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.i64.i64( 
[[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m4_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m8_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vv_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vx_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf4_tum 
// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vv_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vx_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vv_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vx_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( 
[[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vv_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vx_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vv_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vx_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vuint8m4_t test_vaaddu_vv_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vx_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vv_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vx_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vv_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16mf4_tum(mask, 
maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vx_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vv_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vx_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vv_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 
noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vx_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vv_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vx_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vv_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 
[[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vx_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vv_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vx_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vv_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 
2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vx_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vv_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vv_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vx_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return 
__riscv_vaaddu_vx_u32m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vv_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vx_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vv_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vx_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vaaddu_vv_u64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], 
[[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vv_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vx_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vv_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vx_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vv_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return 
__riscv_vaaddu_vv_u8mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vx_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vv_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vx_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vv_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m2_tumu // CHECK-RV64-SAME: 
( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vx_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vv_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vx_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vv_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( 
[[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vx_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vv_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vx_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vv_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 
[[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vx_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vv_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vx_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vv_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vx_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t 
op2, size_t vl) { - return __riscv_vaaddu_vx_u16m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vv_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vx_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vv_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vx_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vv_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vx_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vv_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vx_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vv_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vx_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) 
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vv_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vx_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vuint64m2_t test_vaaddu_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m8_tumu(mask, maskedoff, op1, op2, vl); + return 
__riscv_vaaddu_vv_u64m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vv_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vx_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vv_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vx_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vv_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vx_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vv_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vx_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vv_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vx_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vv_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vx_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, 
size_t vl) { - return __riscv_vaaddu_vx_u8m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vv_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u8m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u8m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vx_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_vx_u8m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u8m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vv_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vx_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vaaddu_vv_u16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vv_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vx_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vv_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vx_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vv_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vx_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vv_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vx_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], 
[[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vv_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u16m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u16m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vx_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_vx_u16m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u16m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vv_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vx_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vv_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t 
vl) { - return __riscv_vaaddu_vv_u32m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vv_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vx_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vv_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vaaddu_vx_u32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vx_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vv_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u32m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u32m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vx_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_vx_u32m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u32m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], 
[[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vaaddu_vv_u64m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vv_u64m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_vx_u64m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_vx_u64m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vasub.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vasub.c --- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vasub.c +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vasub.c @@ -9,1760 +9,1760 @@ // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vv_i8mf8_tu(vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf8_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8mf8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], 
i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vx_i8mf8_tu(vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf8_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8mf8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vv_i8mf4_tu(vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vx_i8mf4_tu(vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vv_i8mf2_tu(vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vx_i8mf2_tu(vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vv_i8m1_tu(vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vasub_vv_i8m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vx_i8m1_tu(vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vv_i8m2_tu(vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vasub_vv_i8m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vx_i8m2_tu(vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vv_i8m4_tu(vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vasub_vv_i8m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vasub.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vx_i8m4_tu(vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vv_i8m8_tu(vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vasub_vv_i8m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vx_i8m8_tu(vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vv_i16mf4_tu(vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vasub_vv_i16mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vx_i16mf4_tu(vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vv_i16mf2_tu(vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i16mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vx_i16mf2_tu(vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vv_i16m1_tu(vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vasub_vv_i16m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vx_i16m1_tu(vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vv_i16m2_tu(vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vasub_vv_i16m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m2_tu(maskedoff, op1, op2, 
__RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vx_i16m2_tu(vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vv_i16m4_tu(vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vasub_vv_i16m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vx_i16m4_tu(vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vv_i16m8_tu(vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vasub_vv_i16m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vx_i16m8_tu(vint16m8_t maskedoff, 
vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vv_i32mf2_tu(vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i32mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vx_i32mf2_tu(vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vv_i32m1_tu(vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vasub_vv_i32m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vx_i32m1_tu(vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.nxv4i32.i64( 
[[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vv_i32m2_tu(vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vasub_vv_i32m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vx_i32m2_tu(vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vv_i32m4_tu(vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vasub_vv_i32m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vx_i32m4_tu(vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vv_i32m8_tu(vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vasub_vv_i32m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasub.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vx_i32m8_tu(vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vv_i64m1_tu(vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vasub_vv_i64m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vx_i64m1_tu(vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vv_i64m2_tu(vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vasub_vv_i64m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vx_i64m2_tu(vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vv_i64m4_tu(vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vasub_vv_i64m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vx_i64m4_tu(vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vv_i64m8_tu(vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vasub_vv_i64m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vx_i64m8_tu(vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vv_i8mf8_tum(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf8_tum(mask, maskedoff, op1, op2, vl); + return 
__riscv_vasub_vv_i8mf8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vx_i8mf8_tum(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8mf8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vv_i8mf4_tum(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vx_i8mf4_tum(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vv_i8mf2_tum(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vx_i8mf2_tum(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vv_i8m1_tum(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vasub_vv_i8m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vx_i8m1_tum(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vv_i8m2_tum(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vasub_vv_i8m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 
[[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vx_i8m2_tum(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vv_i8m4_tum(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vasub_vv_i8m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vx_i8m4_tum(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vv_i8m8_tum(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vasub_vv_i8m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vx_i8m8_tum(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m8_tum(mask, maskedoff, op1, op2, vl); + return 
__riscv_vasub_vx_i8m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vv_i16mf4_tum(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vasub_vv_i16mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vx_i16mf4_tum(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vv_i16mf2_tum(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i16mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vx_i16mf2_tum(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vv_i16m1_tum(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vasub_vv_i16m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vx_i16m1_tum(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vv_i16m2_tum(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vasub_vv_i16m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vx_i16m2_tum(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vv_i16m4_tum(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vasub_vv_i16m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vx_i16m4_tum(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vv_i16m8_tum(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vasub_vv_i16m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vx_i16m8_tum(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vv_i32mf2_tum(vbool64_t mask, vint32mf2_t 
maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i32mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vx_i32mf2_tum(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vv_i32m1_tum(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vasub_vv_i32m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vx_i32m1_tum(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vv_i32m2_tum(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vasub_vv_i32m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: 
define dso_local @test_vasub_vx_i32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vx_i32m2_tum(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vv_i32m4_tum(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vasub_vv_i32m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vx_i32m4_tum(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vv_i32m8_tum(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vasub_vv_i32m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vx_i32m8_tum(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vv_i64m1_tum(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vasub_vv_i64m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vx_i64m1_tum(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vv_i64m2_tum(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vasub_vv_i64m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], 
i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vx_i64m2_tum(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vv_i64m4_tum(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vasub_vv_i64m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vx_i64m4_tum(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vv_i64m8_tum(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vasub_vv_i64m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vx_i64m8_tum(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m8_tum(mask, maskedoff, op1, op2, vl); + return 
__riscv_vasub_vx_i64m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vv_i8mf8_tumu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8mf8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vx_i8mf8_tumu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8mf8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vv_i8mf4_tumu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vx_i8mf4_tumu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vv_i8mf2_tumu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vx_i8mf2_tumu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vv_i8m1_tumu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vasub_vv_i8m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vx_i8m1_tumu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vv_i8m2_tumu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vasub_vv_i8m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vx_i8m2_tumu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vv_i8m4_tumu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vasub_vv_i8m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vx_i8m4_tumu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vv_i8m8_tumu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return 
__riscv_vasub_vv_i8m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vx_i8m8_tumu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vv_i16mf4_tumu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vasub_vv_i16mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vx_i16mf4_tumu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vv_i16mf2_tumu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i16mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf2_tumu // 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vx_i16mf2_tumu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vv_i16m1_tumu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vasub_vv_i16m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vx_i16m1_tumu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vv_i16m2_tumu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vasub_vv_i16m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasub.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vx_i16m2_tumu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vv_i16m4_tumu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vasub_vv_i16m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vx_i16m4_tumu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vv_i16m8_tumu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vasub_vv_i16m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 
[[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vx_i16m8_tumu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vv_i32mf2_tumu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i32mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vx_i32mf2_tumu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vv_i32m1_tumu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vasub_vv_i32m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vx_i32m1_tumu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return 
__riscv_vasub_vx_i32m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vv_i32m2_tumu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vasub_vv_i32m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vx_i32m2_tumu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vv_i32m4_tumu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vasub_vv_i32m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vx_i32m4_tumu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m8_tumu // 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vv_i32m8_tumu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vasub_vv_i32m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vx_i32m8_tumu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vv_i64m1_tumu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vasub_vv_i64m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vx_i64m1_tumu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( 
[[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vv_i64m2_tumu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vasub_vv_i64m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vx_i64m2_tumu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vv_i64m4_tumu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vasub_vv_i64m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vx_i64m4_tumu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret 
[[TMP0]] // vint64m8_t test_vasub_vv_i64m8_tumu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vasub_vv_i64m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vx_i64m8_tumu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vv_i8mf8_mu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8mf8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vx_i8mf8_mu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8mf8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vv_i8mf4_mu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8mf4_mu(mask, maskedoff, op1, op2, 
__RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vx_i8mf4_mu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vv_i8mf2_mu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i8mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vx_i8mf2_mu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vv_i8m1_mu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vasub_vv_i8m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasub.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vx_i8m1_mu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vv_i8m2_mu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vasub_vv_i8m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vx_i8m2_mu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vv_i8m4_mu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vasub_vv_i8m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t 
test_vasub_vx_i8m4_mu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vv_i8m8_mu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vasub_vv_i8m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i8m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vx_i8m8_mu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_vx_i8m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i8m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vv_i16mf4_mu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vasub_vv_i16mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vx_i16mf4_mu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vv_i16mf2_mu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i16mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vx_i16mf2_mu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vv_i16m1_mu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vasub_vv_i16m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vx_i16m1_mu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vv_i16m2_mu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vasub_vv_i16m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vx_i16m2_mu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vv_i16m4_mu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vasub_vv_i16m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vx_i16m4_mu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], 
i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vv_i16m8_mu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vasub_vv_i16m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i16m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vx_i16m8_mu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_vx_i16m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i16m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vv_i32mf2_mu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vasub_vv_i32mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vx_i32mf2_mu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vv_i32m1_mu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vasub_vv_i32m1_mu(mask, maskedoff, op1, op2, vl); + return 
__riscv_vasub_vv_i32m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vx_i32m1_mu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vv_i32m2_mu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vasub_vv_i32m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vx_i32m2_mu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vv_i32m4_mu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vasub_vv_i32m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vx_i32m4_mu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vv_i32m8_mu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vasub_vv_i32m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i32m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vx_i32m8_mu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_vx_i32m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i32m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vv_i64m1_mu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vasub_vv_i64m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasub.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vx_i64m1_mu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vv_i64m2_mu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vasub_vv_i64m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vx_i64m2_mu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vv_i64m4_mu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vasub_vv_i64m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vx_i64m4_mu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return 
__riscv_vasub_vx_i64m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vv_i64m8_mu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vasub_vv_i64m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vv_i64m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vx_i64m8_mu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_vx_i64m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_vx_i64m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vasubu.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vasubu.c --- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vasubu.c +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/non-overloaded/vasubu.c @@ -9,1760 +9,1760 @@ // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vv_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vasubu_vv_u8mf8_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8mf8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vx_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return 
__riscv_vasubu_vx_u8mf8_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8mf8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vv_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vasubu_vv_u8mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vx_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vv_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u8mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vx_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // 
CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vv_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vx_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vv_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vx_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vv_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 
[[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vx_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vv_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vx_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vv_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vasubu_vv_u16mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vx_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16mf4_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16mf4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vv_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u16mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vx_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vv_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vx_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vv_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m2_tu(maskedoff, 
op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vx_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vv_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vx_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vv_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vuint16m8_t test_vasubu_vx_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u32mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vx_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32mf2_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32mf2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vx_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], 
[[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vv_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vx_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vv_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vx_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vv_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 
noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vx_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m1_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m1_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m2_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m2_tu(maskedoff, op1, op2, vl); + return 
__riscv_vasubu_vx_u64m2_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m4_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m4_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m8_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m8_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) 
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vuint8mf8_t test_vasubu_vv_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) {
- return __riscv_vasubu_vv_u8mf8_tum(mask, maskedoff, op1, op2, vl);
+ return __riscv_vasubu_vv_u8mf8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl);
}
// CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf8_tum
// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vuint8mf8_t test_vasubu_vx_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) {
- return __riscv_vasubu_vx_u8mf8_tum(mask, maskedoff, op1, op2, vl);
+ return __riscv_vasubu_vx_u8mf8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl);
}
// CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf4_tum
// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vuint8mf4_t test_vasubu_vv_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) {
- return __riscv_vasubu_vv_u8mf4_tum(mask, maskedoff, op1, op2, vl);
+ return __riscv_vasubu_vv_u8mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl);
}
// CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf4_tum
// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vuint8mf4_t test_vasubu_vx_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) {
- return __riscv_vasubu_vx_u8mf4_tum(mask, maskedoff, op1, op2, vl);
+ return __riscv_vasubu_vx_u8mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl);
}
// CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf2_tum
// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] {
// CHECK-RV64-NEXT: entry:
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2)
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2)
// CHECK-RV64-NEXT: ret [[TMP0]]
//
vuint8mf2_t test_vasubu_vv_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) {
- return __riscv_vasubu_vv_u8mf2_tum(mask, maskedoff, op1, op2, vl);
+ return __riscv_vasubu_vv_u8mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vx_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vv_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vx_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vv_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vx_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vv_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vx_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vv_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vx_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vv_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vasubu_vv_u16mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vx_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16mf4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16mf4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vv_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u16mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t 
test_vasubu_vx_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vv_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vx_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vv_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vx_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m2_tum(mask, maskedoff, op1, op2, vl); + return 
__riscv_vasubu_vx_u16m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vv_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vx_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vv_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vx_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], 
[[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vv_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u32mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vx_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32mf2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32mf2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vv_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vv_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vx_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vv_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vx_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( 
[[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vv_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vx_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m1_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m1_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t 
op2, size_t vl) { - return __riscv_vasubu_vv_u64m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m2_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m2_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m4_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m4_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vasubu_vx_u64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m8_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m8_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vv_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vasubu_vv_u8mf8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8mf8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vx_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8mf8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8mf8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vv_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vasubu_vv_u8mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vx_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vv_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u8mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vx_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vv_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], 
[[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vx_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vv_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vx_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vv_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vx_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return 
__riscv_vasubu_vx_u8m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vv_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vx_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vv_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vasubu_vv_u16mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vx_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16mf4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16mf4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vasubu_vv_u16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vv_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u16mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vx_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vv_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vx_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: 
-// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vv_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vx_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vv_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vx_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vv_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vx_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vv_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u32mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vx_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32mf2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32mf2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t 
test_vasubu_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vv_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vx_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vv_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m4_tumu(mask, maskedoff, op1, op2, vl); + 
return __riscv_vasubu_vv_u32m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vx_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vv_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vx_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m1_tumu // CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m1_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m1_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m2_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m2_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasubu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m4_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m4_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m8_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m8_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vv_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vasubu_vv_u8mf8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8mf8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 
[[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vx_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8mf8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8mf8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vv_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vasubu_vv_u8mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vx_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vv_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u8mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vx_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8mf2_mu(mask, 
maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vv_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vx_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vv_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vx_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vv_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vx_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vv_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u8m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u8m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vx_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_vx_u8m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u8m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vv_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vasubu_vv_u16mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vx_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16mf4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16mf4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vv_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u16mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vx_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t 
test_vasubu_vv_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vx_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vv_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vx_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vv_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m4_mu(mask, 
maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vx_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vv_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u16m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u16m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vx_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_vx_u16m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u16m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vv_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vasubu_vv_u32mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vx_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32mf2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32mf2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vv_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vv_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 
[[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vx_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vv_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vx_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vv_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u32m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u32m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t 
test_vasubu_vx_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_vx_u32m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u32m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m1_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m1_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m2_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m2_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m2_mu(mask, maskedoff, op1, op2, 
__RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m4_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m4_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vasubu_vv_u64m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vv_u64m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_vx_u64m8_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_vx_u64m8_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaadd.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaadd.c --- 
a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaadd.c +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaadd.c @@ -9,1760 +9,1760 @@ // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vv_i8mf8_tu(vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vx_i8mf8_tu(vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vv_i8mf4_tu(vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vx_i8mf4_tu(vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t 
test_vaadd_vv_i8mf2_tu(vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vx_i8mf2_tu(vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vv_i8m1_tu(vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vx_i8m1_tu(vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vv_i8m2_tu(vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vint8m2_t test_vaadd_vx_i8m2_tu(vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vv_i8m4_tu(vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vx_i8m4_tu(vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vv_i8m8_tu(vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vx_i8m8_tu(vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vint16mf4_t test_vaadd_vv_i16mf4_tu(vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vx_i16mf4_tu(vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vv_i16mf2_tu(vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vx_i16mf2_tu(vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vv_i16m1_tu(vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i16.i16.i64( [[MASKEDOFF]], 
[[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vx_i16m1_tu(vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vv_i16m2_tu(vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vx_i16m2_tu(vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vv_i16m4_tu(vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vx_i16m4_tu(vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaadd.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vv_i16m8_tu(vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vx_i16m8_tu(vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vv_i32mf2_tu(vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vx_i32mf2_tu(vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vv_i32m1_tu(vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.i32.i64( [[MASKEDOFF]], 
[[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vx_i32m1_tu(vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vv_i32m2_tu(vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vx_i32m2_tu(vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vv_i32m4_tu(vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vx_i32m4_tu(vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaadd.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vv_i32m8_tu(vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vx_i32m8_tu(vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vv_i64m1_tu(vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vx_i64m1_tu(vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vv_i64m2_tu(vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vx_i64m2_tu(vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vv_i64m4_tu(vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vx_i64m4_tu(vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vv_i64m8_tu(vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vx_i64m8_tu(vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_tu(maskedoff, op1, op2, vl); + return __riscv_vaadd_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vv_i8mf8_tum(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vx_i8mf8_tum(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vv_i8mf4_tum(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vx_i8mf4_tum(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // 
CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vv_i8mf2_tum(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vx_i8mf2_tum(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vv_i8m1_tum(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vx_i8m1_tum(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vv_i8m2_tum(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m2_tum // 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vx_i8m2_tum(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vv_i8m4_tum(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vx_i8m4_tum(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vv_i8m8_tum(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaadd.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vx_i8m8_tum(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vv_i16mf4_tum(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vx_i16mf4_tum(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vv_i16mf2_tum(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vx_i16mf2_tum(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, 
maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vv_i16m1_tum(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vx_i16m1_tum(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vv_i16m2_tum(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vx_i16m2_tum(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vv_i16m4_tum(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vx_i16m4_tum(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vv_i16m8_tum(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vx_i16m8_tum(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // 
CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vv_i32mf2_tum(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vx_i32mf2_tum(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vv_i32m1_tum(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vx_i32m1_tum(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vv_i32m2_tum(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vx_i32m2_tum(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vv_i32m4_tum(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vx_i32m4_tum(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vv_i32m8_tum(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( 
[[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vx_i32m8_tum(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vv_i64m1_tum(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vx_i64m1_tum(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vv_i64m2_tum(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vx_i64m2_tum(vbool32_t mask, vint64m2_t maskedoff, 
vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vv_i64m4_tum(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vx_i64m4_tum(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vv_i64m8_tum(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vx_i64m8_tum(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vv_i8mf8_tumu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vx_i8mf8_tumu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vv_i8mf4_tumu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vx_i8mf4_tumu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], 
[[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vv_i8mf2_tumu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vx_i8mf2_tumu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vv_i8m1_tumu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vx_i8m1_tumu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vv_i8m2_tumu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vx_i8m2_tumu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vv_i8m4_tumu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vx_i8m4_tumu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vv_i8m8_tumu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 
[[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vx_i8m8_tumu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vv_i16mf4_tumu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vx_i16mf4_tumu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vv_i16mf2_tumu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vx_i16mf2_tumu(vbool32_t mask, 
vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vv_i16m1_tumu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vx_i16m1_tumu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vv_i16m2_tumu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vx_i16m2_tumu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m4_tumu // CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vv_i16m4_tumu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vx_i16m4_tumu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vv_i16m8_tumu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vx_i16m8_tumu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vv_i32mf2_tumu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vx_i32mf2_tumu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vv_i32m1_tumu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vx_i32m1_tumu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vv_i32m2_tumu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - 
return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vx_i32m2_tumu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vv_i32m4_tumu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vx_i32m4_tumu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vv_i32m8_tumu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vx_i32m8_tumu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vv_i64m1_tumu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vx_i64m1_tumu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vv_i64m2_tumu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( 
[[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vx_i64m2_tumu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vv_i64m4_tumu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vx_i64m4_tumu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vv_i64m8_tumu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vx_i64m8_tumu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_tumu(mask, 
maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vv_i8mf8_mu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vaadd_vx_i8mf8_mu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vv_i8mf4_mu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vaadd_vx_i8mf4_mu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], 
[[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vv_i8mf2_mu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vaadd_vx_i8mf2_mu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vv_i8m1_mu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vaadd_vx_i8m1_mu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vv_i8m2_mu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return 
__riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vaadd_vx_i8m2_mu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vv_i8m4_mu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vaadd_vx_i8m4_mu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vv_i8m8_mu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vaadd_vx_i8m8_mu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vv_i16mf4_mu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vaadd_vx_i16mf4_mu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vaadd_vv_i16mf2_mu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret 
[[TMP0]] // vint16mf2_t test_vaadd_vx_i16mf2_mu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vv_i16m1_mu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vaadd_vx_i16m1_mu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vv_i16m2_mu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vaadd_vx_i16m2_mu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m4_mu // 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vv_i16m4_mu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vaadd_vx_i16m4_mu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vv_i16m8_mu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vaadd_vx_i16m8_mu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] 
= call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vv_i32mf2_mu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vaadd_vx_i32mf2_mu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vv_i32m1_mu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vaadd_vx_i32m1_mu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vv_i32m2_mu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, 
op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vaadd_vx_i32m2_mu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vv_i32m4_mu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vaadd_vx_i32m4_mu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vv_i32m8_mu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vaadd_vx_i32m8_mu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vv_i64m1_mu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vaadd_vx_i64m1_mu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vaadd_vv_i64m2_mu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t 
test_vaadd_vx_i64m2_mu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vv_i64m4_mu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vaadd_vx_i64m4_mu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vv_i64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vv_i64m8_mu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaadd_vx_i64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaadd.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vaadd_vx_i64m8_mu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vaadd_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaadd_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaaddu.c 
b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaaddu.c --- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaaddu.c +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vaaddu.c @@ -9,1760 +9,1760 @@ // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vv_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vx_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vv_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vx_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaaddu.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vv_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vx_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vv_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vx_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vv_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 
[[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vx_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vv_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vx_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vv_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vx_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaaddu.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vv_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vx_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vv_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vx_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vv_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef 
zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vx_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vv_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vx_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vv_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vx_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: 
define dso_local @test_vaaddu_vv_u16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vv_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vx_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vx_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, 
op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vx_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vv_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vx_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vv_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t 
test_vaaddu_vx_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vv_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vx_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], 
[[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_tu(maskedoff, op1, op2, vl); + return __riscv_vaaddu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vv_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vx_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vv_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vx_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, 
op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vv_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vx_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vv_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vx_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vv_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vx_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vv_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vx_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // 
CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vv_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vx_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vv_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vx_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vv_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, 
__RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vx_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vv_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vx_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vv_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vx_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vv_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vx_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vv_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], 
[[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vx_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vv_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vx_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vv_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return 
__riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vv_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vx_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vv_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vx_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: 
entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vv_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vx_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], 
[[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, 
maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vv_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vx_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vv_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vx_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vv_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vx_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vv_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) 
// CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vx_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vv_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vx_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vv_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vx_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vv_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vx_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vv_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vx_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vv_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vx_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vv_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vx_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 
[[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vv_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vx_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vv_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vx_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vv_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return 
__riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vx_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vv_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vx_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vv_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vx_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vv_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vx_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vv_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vx_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t 
vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vv_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vaaddu_vx_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vv_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vaaddu_vx_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vv_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vaaddu_vx_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vv_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, 
vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vaaddu_vx_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vv_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vaaddu_vx_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vv_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vaaddu_vx_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vv_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vaaddu_vx_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vaaddu_vv_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] 
// vuint16mf4_t test_vaaddu_vx_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vv_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vaaddu_vx_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vv_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vaaddu_vx_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vv_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vaaddu_vx_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vv_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vaaddu_vx_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vv_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vaaddu_vx_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vv_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vaaddu_vx_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // 
CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vv_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vaaddu_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vv_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vaaddu_vx_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vv_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } 
// CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vaaddu_vx_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vv_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vaaddu_vx_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vaaddu_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vaaddu_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vaaddu_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vuint64m4_t test_vaaddu_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vv_u64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vaaddu_vx_u64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vaaddu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vaaddu_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vaaddu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); }
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vasub.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vasub.c
--- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vasub.c
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vasub.c
@@ -9,1760 +9,1760 @@
// CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vv_i8mf8_tu(vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vx_i8mf8_tu(vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl)
{ - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vv_i8mf4_tu(vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vx_i8mf4_tu(vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vv_i8mf2_tu(vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vx_i8mf2_tu(vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vv_i8m1_tu(vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) 
{ - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vx_i8m1_tu(vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vv_i8m2_tu(vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vx_i8m2_tu(vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vv_i8m4_tu(vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vx_i8m4_tu(vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t 
vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vv_i8m8_tu(vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vx_i8m8_tu(vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vv_i16mf4_tu(vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vx_i16mf4_tu(vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vv_i16mf2_tu(vint16mf2_t maskedoff, 
vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vx_i16mf2_tu(vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vv_i16m1_tu(vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vx_i16m1_tu(vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vv_i16m2_tu(vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret 
[[TMP0]] // vint16m2_t test_vasub_vx_i16m2_tu(vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vv_i16m4_tu(vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vx_i16m4_tu(vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vv_i16m8_tu(vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vx_i16m8_tu(vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], 
[[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vv_i32mf2_tu(vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vx_i32mf2_tu(vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vv_i32m1_tu(vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vx_i32m1_tu(vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vv_i32m2_tu(vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vasub.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vx_i32m2_tu(vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vv_i32m4_tu(vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vx_i32m4_tu(vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vv_i32m8_tu(vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vx_i32m8_tu(vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 
[[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vv_i64m1_tu(vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vx_i64m1_tu(vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vv_i64m2_tu(vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vx_i64m2_tu(vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vv_i64m4_tu(vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.i64.i64( 
[[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vx_i64m4_tu(vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vv_i64m8_tu(vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vx_i64m8_tu(vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_tu(maskedoff, op1, op2, vl); + return __riscv_vasub_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vv_i8mf8_tum(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vx_i8mf8_tum(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf4_tum // 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vv_i8mf4_tum(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vx_i8mf4_tum(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vv_i8mf2_tum(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vx_i8mf2_tum(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vv_i8m1_tum(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vx_i8m1_tum(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vv_i8m2_tum(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vx_i8m2_tum(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vv_i8m4_tum(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, 
maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vx_i8m4_tum(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vv_i8m8_tum(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vx_i8m8_tum(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vv_i16mf4_tum(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasub.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vx_i16mf4_tum(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vv_i16mf2_tum(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vx_i16mf2_tum(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vv_i16m1_tum(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vint16m1_t test_vasub_vx_i16m1_tum(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vv_i16m2_tum(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vx_i16m2_tum(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vv_i16m4_tum(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vx_i16m4_tum(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vasub_vv_i16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vv_i16m8_tum(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vx_i16m8_tum(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vv_i32mf2_tum(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vx_i32mf2_tum(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 
[[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vv_i32m1_tum(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vx_i32m1_tum(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vv_i32m2_tum(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vx_i32m2_tum(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vv_i32m4_tum(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t 
vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vx_i32m4_tum(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vv_i32m8_tum(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vx_i32m8_tum(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vv_i64m1_tum(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vx_i64m1_tum(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vv_i64m2_tum(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vx_i64m2_tum(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vv_i64m4_tum(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 
0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vx_i64m4_tum(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vv_i64m8_tum(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vx_i64m8_tum(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vv_i8mf8_tumu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vx_i8mf8_tumu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vv_i8mf4_tumu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vx_i8mf4_tumu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vv_i8mf2_tumu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vx_i8mf2_tumu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], 
[[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vv_i8m1_tumu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vx_i8m1_tumu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vv_i8m2_tumu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vx_i8m2_tumu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vv_i8m4_tumu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return 
__riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vx_i8m4_tumu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vv_i8m8_tumu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vx_i8m8_tumu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vv_i16mf4_tumu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vx_i16mf4_tumu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vv_i16mf2_tumu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vx_i16mf2_tumu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vv_i16m1_tumu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( [[MASKEDOFF]], 
[[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vx_i16m1_tumu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vv_i16m2_tumu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vx_i16m2_tumu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vv_i16m4_tumu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vx_i16m4_tumu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return 
__riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vv_i16m8_tumu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vx_i16m8_tumu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vv_i32mf2_tumu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vx_i32mf2_tumu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vv_i32m1_tumu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vx_i32m1_tumu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vv_i32m2_tumu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vx_i32m2_tumu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // 
CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vv_i32m4_tumu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vx_i32m4_tumu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vv_i32m8_tumu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vx_i32m8_tumu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vv_i64m1_tumu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vx_i64m1_tumu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vv_i64m2_tumu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vx_i64m2_tumu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vv_i64m4_tumu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( [[MASKEDOFF]], 
[[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vx_i64m4_tumu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vv_i64m8_tumu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vx_i64m8_tumu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vv_i8mf8_mu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, vint8mf8_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf8_t test_vasub_vx_i8mf8_mu(vbool64_t mask, vint8mf8_t maskedoff, vint8mf8_t op1, 
int8_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vv_i8mf4_mu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, vint8mf4_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf4_t test_vasub_vx_i8mf4_mu(vbool32_t mask, vint8mf4_t maskedoff, vint8mf4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vv_i8mf2_mu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, vint8mf2_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8mf2_t test_vasub_vx_i8mf2_mu(vbool16_t mask, vint8mf2_t maskedoff, vint8mf2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // 
CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vv_i8m1_mu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, vint8m1_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m1_t test_vasub_vx_i8m1_mu(vbool8_t mask, vint8m1_t maskedoff, vint8m1_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vv_i8m2_mu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, vint8m2_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m2_t test_vasub_vx_i8m2_mu(vbool4_t mask, vint8m2_t maskedoff, vint8m2_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t 
test_vasub_vv_i8m4_mu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, vint8m4_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m4_t test_vasub_vx_i8m4_mu(vbool2_t mask, vint8m4_t maskedoff, vint8m4_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vv_i8m8_mu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, vint8m8_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint8m8_t test_vasub_vx_i8m8_mu(vbool1_t mask, vint8m8_t maskedoff, vint8m8_t op1, int8_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vv_i16mf4_mu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, vint16mf4_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf4_t test_vasub_vx_i16mf4_mu(vbool64_t mask, vint16mf4_t maskedoff, vint16mf4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vv_i16mf2_mu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, vint16mf2_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16mf2_t test_vasub_vx_i16mf2_mu(vbool32_t mask, vint16mf2_t maskedoff, vint16mf2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vv_i16m1_mu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, vint16m1_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasub.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m1_t test_vasub_vx_i16m1_mu(vbool16_t mask, vint16m1_t maskedoff, vint16m1_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vv_i16m2_mu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, vint16m2_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m2_t test_vasub_vx_i16m2_mu(vbool8_t mask, vint16m2_t maskedoff, vint16m2_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vv_i16m4_mu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, vint16m4_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m4_t test_vasub_vx_i16m4_mu(vbool4_t mask, vint16m4_t maskedoff, vint16m4_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return 
__riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vv_i16m8_mu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, vint16m8_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint16m8_t test_vasub_vx_i16m8_mu(vbool2_t mask, vint16m8_t maskedoff, vint16m8_t op1, int16_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vv_i32mf2_mu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, vint32mf2_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32mf2_t test_vasub_vx_i32mf2_mu(vbool64_t mask, vint32mf2_t maskedoff, vint32mf2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vv_i32m1_mu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m1_t test_vasub_vx_i32m1_mu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vv_i32m2_mu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, vint32m2_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m2_t test_vasub_vx_i32m2_mu(vbool16_t mask, vint32m2_t maskedoff, vint32m2_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t 
test_vasub_vv_i32m4_mu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, vint32m4_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m4_t test_vasub_vx_i32m4_mu(vbool8_t mask, vint32m4_t maskedoff, vint32m4_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vv_i32m8_mu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, vint32m8_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint32m8_t test_vasub_vx_i32m8_mu(vbool4_t mask, vint32m8_t maskedoff, vint32m8_t op1, int32_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vv_i64m1_mu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, vint64m1_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m1_mu // CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m1_t test_vasub_vx_i64m1_mu(vbool64_t mask, vint64m1_t maskedoff, vint64m1_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vv_i64m2_mu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, vint64m2_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m2_t test_vasub_vx_i64m2_mu(vbool32_t mask, vint64m2_t maskedoff, vint64m2_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vv_i64m4_mu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, vint64m4_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasub.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m4_t test_vasub_vx_i64m4_mu(vbool16_t mask, vint64m4_t maskedoff, vint64m4_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vv_i64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vv_i64m8_mu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, vint64m8_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasub_vx_i64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasub.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vint64m8_t test_vasub_vx_i64m8_mu(vbool8_t mask, vint64m8_t maskedoff, vint64m8_t op1, int64_t op2, size_t vl) { - return __riscv_vasub_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasub_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vasubu.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vasubu.c --- a/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vasubu.c +++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-autogenerated/policy/overloaded/vasubu.c @@ -9,1760 +9,1760 @@ // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vv_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 
[[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vx_u8mf8_tu(vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vv_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vx_u8mf4_tu(vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vv_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vx_u8mf2_tu(vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasubu.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vv_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vx_u8m1_tu(vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vv_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vx_u8m2_tu(vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vv_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 
[[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vx_u8m4_tu(vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vv_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vx_u8m8_tu(vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vv_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vx_u16mf4_tu(vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.vasubu.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vv_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vx_u16mf2_tu(vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vv_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vx_u16m1_tu(vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vv_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef 
zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vx_u16m2_tu(vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vv_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vx_u16m4_tu(vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vv_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vx_u16m8_tu(vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // 
CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vv_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32mf2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vx_u32mf2_tu(vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vx_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vv_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return 
__riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vx_u32m2_tu(vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vv_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vx_u32m4_tu(vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vv_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vuint32m8_t test_vasubu_vx_u32m8_tu(vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vv_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m1_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vx_u64m1_tu(vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vv_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m2_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vx_u64m2_tu(vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], 
[[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vv_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m4_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vx_u64m4_tu(vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vv_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m8_tu // CHECK-RV64-SAME: ( [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], i64 0, i64 [[VL]]) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vx_u64m8_tu(vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_tu(maskedoff, op1, op2, vl); + return __riscv_vasubu_tu(maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vv_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] 
= call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vx_u8mf8_tum(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vv_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vx_u8mf4_tum(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vv_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // 
vuint8mf2_t test_vasubu_vx_u8mf2_tum(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vv_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vx_u8m1_tum(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vv_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vx_u8m2_tum(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local 
@test_vasubu_vv_u8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vv_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vx_u8m4_tum(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vv_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vx_u8m8_tum(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], 
i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vv_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vx_u16mf4_tum(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vv_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vx_u16mf2_tum(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vv_u16m1_tum(vbool16_t mask, 
vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vx_u16m1_tum(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vv_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vx_u16m2_tum(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vv_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m4_tum 
// CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vx_u16m4_tum(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vv_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vx_u16m8_tum(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vv_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32mf2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], 
[[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vx_u32mf2_tum(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vv_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vv_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t 
test_vasubu_vx_u32m2_tum(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vv_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vx_u32m4_tum(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vv_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vx_u32m8_tum(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: 
define dso_local @test_vasubu_vv_u64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vv_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m1_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vx_u64m1_tum(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vv_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m2_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vx_u64m2_tum(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], 
[[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vv_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m4_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vx_u64m4_tum(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vv_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m8_tum // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 2) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 2) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vx_u64m8_tum(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_tum(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tum(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vv_u8mf8_tumu(vbool64_t mask, 
vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vx_u8mf8_tumu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vv_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vx_u8mf4_tumu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vv_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf2_tumu // 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vx_u8mf2_tumu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vv_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vx_u8m1_tumu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vv_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], 
i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vx_u8m2_tumu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vv_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vx_u8m4_tumu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vv_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vx_u8m8_tumu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, 
size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vv_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vx_u16mf4_tumu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vv_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vx_u16mf2_tumu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m1_tumu // 
CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vv_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vx_u16m1_tumu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vv_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vx_u16m2_tumu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], 
[[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vv_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vx_u16m4_tumu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vv_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vx_u16m8_tumu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t 
test_vasubu_vv_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32mf2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vx_u32mf2_tumu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vv_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); 
} // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vx_u32m2_tumu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vv_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vx_u32m4_tumu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vv_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vx_u32m8_tumu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vv_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m1_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vx_u64m1_tumu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vv_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m2_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], 
i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vx_u64m2_tumu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vv_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m4_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vx_u64m4_tumu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vv_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m8_tumu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 0) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 0) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vx_u64m8_tumu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_tumu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_tumu(mask, maskedoff, op1, 
op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vv_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, vuint8mf8_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf8_t test_vasubu_vx_u8mf8_mu(vbool64_t mask, vuint8mf8_t maskedoff, vuint8mf8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vv_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, vuint8mf4_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf4_t test_vasubu_vx_u8mf4_mu(vbool32_t mask, vuint8mf4_t maskedoff, vuint8mf4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vv_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, vuint8mf2_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8mf2_t test_vasubu_vx_u8mf2_mu(vbool16_t mask, vuint8mf2_t maskedoff, vuint8mf2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vv_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m1_t test_vasubu_vx_u8m1_mu(vbool8_t mask, vuint8m1_t maskedoff, vuint8m1_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t 
test_vasubu_vv_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m2_t test_vasubu_vx_u8m2_mu(vbool4_t mask, vuint8m2_t maskedoff, vuint8m2_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vv_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m4_t test_vasubu_vx_u8m4_mu(vbool2_t mask, vuint8m4_t maskedoff, vuint8m4_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u8m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vv_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u8m8_mu // CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv64i8.i8.i64( [[MASKEDOFF]], [[OP1]], i8 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint8m8_t test_vasubu_vx_u8m8_mu(vbool1_t mask, vuint8m8_t maskedoff, vuint8m8_t op1, uint8_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vv_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, vuint16mf4_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf4_t test_vasubu_vx_u16mf4_mu(vbool64_t mask, vuint16mf4_t maskedoff, vuint16mf4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vv_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, vuint16mf2_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], 
i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16mf2_t test_vasubu_vx_u16mf2_mu(vbool32_t mask, vuint16mf2_t maskedoff, vuint16mf2_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vv_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m1_t test_vasubu_vx_u16m1_mu(vbool16_t mask, vuint16m1_t maskedoff, vuint16m1_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vv_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m2_t test_vasubu_vx_u16m2_mu(vbool8_t mask, vuint16m2_t maskedoff, vuint16m2_t op1, 
uint16_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vv_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m4_t test_vasubu_vx_u16m4_mu(vbool4_t mask, vuint16m4_t maskedoff, vuint16m4_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vv_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u16m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv32i16.i16.i64( [[MASKEDOFF]], [[OP1]], i16 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint16m8_t test_vasubu_vx_u16m8_mu(vbool2_t mask, vuint16m8_t maskedoff, vuint16m8_t op1, uint16_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vv_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, vuint32mf2_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32mf2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32mf2_t test_vasubu_vx_u32mf2_mu(vbool64_t mask, vuint32mf2_t maskedoff, vuint32mf2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vv_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m1_t test_vasubu_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.vasubu.mask.nxv4i32.nxv4i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vv_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m2_t test_vasubu_vx_u32m2_mu(vbool16_t mask, vuint32m2_t maskedoff, vuint32m2_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vv_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m4_t test_vasubu_vx_u32m4_mu(vbool8_t mask, vuint32m4_t maskedoff, vuint32m4_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vv_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { - return 
__riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u32m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv16i32.i32.i64( [[MASKEDOFF]], [[OP1]], i32 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint32m8_t test_vasubu_vx_u32m8_mu(vbool4_t mask, vuint32m8_t maskedoff, vuint32m8_t op1, uint32_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vv_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m1_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv1i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m1_t test_vasubu_vx_u64m1_mu(vbool64_t mask, vuint64m1_t maskedoff, vuint64m1_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vv_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m2_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv2i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m2_t test_vasubu_vx_u64m2_mu(vbool32_t mask, vuint64m2_t maskedoff, vuint64m2_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vv_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m4_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv4i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m4_t test_vasubu_vx_u64m4_mu(vbool16_t mask, vuint64m4_t maskedoff, vuint64m4_t op1, uint64_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vv_u64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.nxv8i64.i64( [[MASKEDOFF]], [[OP1]], [[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1) // CHECK-RV64-NEXT: ret [[TMP0]] // vuint64m8_t test_vasubu_vv_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { - return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl); + return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl); } // CHECK-RV64-LABEL: define dso_local @test_vasubu_vx_u64m8_mu // CHECK-RV64-SAME: ( [[MASK:%.*]], [[MASKEDOFF:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0]] { // CHECK-RV64-NEXT: entry: -// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]], i64 1) +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.vasubu.mask.nxv8i64.i64.i64( [[MASKEDOFF]], [[OP1]], i64 
[[OP2]], [[MASK]], i64 0, i64 [[VL]], i64 1)
// CHECK-RV64-NEXT:    ret [[TMP0]]
//
vuint64m8_t test_vasubu_vx_u64m8_mu(vbool8_t mask, vuint64m8_t maskedoff, vuint64m8_t op1, uint64_t op2, size_t vl) {
-  return __riscv_vasubu_mu(mask, maskedoff, op1, op2, vl);
+  return __riscv_vasubu_mu(mask, maskedoff, op1, op2, __RISCV_VXRM_RNU, vl);
}
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vaadd-out-of-range.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vaadd-out-of-range.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vaadd-out-of-range.c
@@ -0,0 +1,67 @@
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d \
+// RUN:   -target-feature +v -target-feature +zfh -target-feature +experimental-zvfh \
+// RUN:   -fsyntax-only -verify %s
+
+#include <riscv_vector.h>
+
+vint32m1_t test_vaadd_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaadd_vv_i32m1(op1, op2, 5, vl);
+}
+
+vint32m1_t test_vaadd_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaadd_vx_i32m1(op1, op2, 5, vl);
+}
+
+vint32m1_t test_vaadd_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaadd_vv_i32m1_m(mask, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vaadd_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaadd_vx_i32m1_m(mask, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vaadd_vv_i32m1_tu(vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaadd_vv_i32m1_tu(maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vaadd_vx_i32m1_tu(vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaadd_vx_i32m1_tu(maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vaadd_vv_i32m1_tum(
+    vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaadd_vv_i32m1_tum(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vaadd_vx_i32m1_tum(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaadd_vx_i32m1_tum(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vaadd_vv_i32m1_tumu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaadd_vv_i32m1_tumu(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vaadd_vx_i32m1_tumu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaadd_vx_i32m1_tumu(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vaadd_vv_i32m1_mu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaadd_vv_i32m1_mu(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vaadd_vx_i32m1_mu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaadd_vx_i32m1_mu(mask, maskedoff, op1, op2, 5, vl);
+}
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vaaddu-out-of-range.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vaaddu-out-of-range.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vaaddu-out-of-range.c
@@ -0,0 +1,67 @@
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d \
+// RUN:   -target-feature +v -target-feature +zfh -target-feature +experimental-zvfh \
+// RUN:   -fsyntax-only -verify %s
+
+#include <riscv_vector.h>
+
+vuint32m1_t test_vaaddu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaaddu_vv_u32m1(op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vaaddu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaaddu_vx_u32m1(op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vaaddu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaaddu_vv_u32m1_m(mask, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vaaddu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaaddu_vx_u32m1_m(mask, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vaaddu_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaaddu_vv_u32m1_tu(maskedoff, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vaaddu_vx_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaaddu_vx_u32m1_tu(maskedoff, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vaaddu_vv_u32m1_tum(
+    vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaaddu_vv_u32m1_tum(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vaaddu_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaaddu_vx_u32m1_tum(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vaaddu_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaaddu_vv_u32m1_tumu(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vaaddu_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaaddu_vx_u32m1_tumu(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vaaddu_vv_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaaddu_vv_u32m1_mu(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vaaddu_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vaaddu_vx_u32m1_mu(mask, maskedoff, op1, op2, 5, vl);
+}
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vasub-out-of-range.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vasub-out-of-range.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vasub-out-of-range.c
@@ -0,0 +1,67 @@
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d \
+// RUN: -target-feature +v -target-feature +zfh -target-feature +experimental-zvfh \
+// RUN: -fsyntax-only -verify %s
+
+#include <riscv_vector.h>
+
+vint32m1_t test_vasub_vv_i32m1(vint32m1_t op1, vint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasub_vv_i32m1(op1, op2, 5, vl);
+}
+
+vint32m1_t test_vasub_vx_i32m1(vint32m1_t op1, int32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasub_vx_i32m1(op1, op2, 5, vl);
+}
+
+vint32m1_t test_vasub_vv_i32m1_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasub_vv_i32m1_m(mask, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vasub_vx_i32m1_m(vbool32_t mask, vint32m1_t op1, int32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasub_vx_i32m1_m(mask, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vasub_vv_i32m1_tu(vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasub_vv_i32m1_tu(maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vasub_vx_i32m1_tu(vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasub_vx_i32m1_tu(maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vasub_vv_i32m1_tum(
+    vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasub_vv_i32m1_tum(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vasub_vx_i32m1_tum(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasub_vx_i32m1_tum(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vasub_vv_i32m1_tumu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasub_vv_i32m1_tumu(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vasub_vx_i32m1_tumu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasub_vx_i32m1_tumu(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vasub_vv_i32m1_mu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, vint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasub_vv_i32m1_mu(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vint32m1_t test_vasub_vx_i32m1_mu(vbool32_t mask, vint32m1_t maskedoff, vint32m1_t op1, int32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasub_vx_i32m1_mu(mask, maskedoff, op1, op2, 5, vl);
+}
diff --git a/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vasubu-out-of-range.c b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vasubu-out-of-range.c
new file mode 100644
--- /dev/null
+++ b/clang/test/CodeGen/RISCV/rvv-intrinsics-handcrafted/vasubu-out-of-range.c
@@ -0,0 +1,67 @@
+// REQUIRES: riscv-registered-target
+// RUN: %clang_cc1 -triple riscv64 -target-feature +f -target-feature +d \
+// RUN: -target-feature +v -target-feature +zfh -target-feature +experimental-zvfh \
+// RUN: -fsyntax-only -verify %s
+
+#include <riscv_vector.h>
+
+vuint32m1_t test_vasubu_vv_u32m1(vuint32m1_t op1, vuint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasubu_vv_u32m1(op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vasubu_vx_u32m1(vuint32m1_t op1, uint32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasubu_vx_u32m1(op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vasubu_vv_u32m1_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasubu_vv_u32m1_m(mask, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vasubu_vx_u32m1_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasubu_vx_u32m1_m(mask, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vasubu_vv_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasubu_vv_u32m1_tu(maskedoff, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vasubu_vx_u32m1_tu(vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasubu_vx_u32m1_tu(maskedoff, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vasubu_vv_u32m1_tum(
+    vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasubu_vv_u32m1_tum(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vasubu_vx_u32m1_tum(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasubu_vx_u32m1_tum(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vasubu_vv_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasubu_vv_u32m1_tumu(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vasubu_vx_u32m1_tumu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) {
+  // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}}
+  return __riscv_vasubu_vx_u32m1_tumu(mask, maskedoff, op1, op2, 5, vl);
+}
+
+vuint32m1_t test_vasubu_vv_u32m1_mu(vbool32_t mask, vuint32m1_t
maskedoff, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}} + return __riscv_vasubu_vv_u32m1_mu(mask, maskedoff, op1, op2, 5, vl); +} + +vuint32m1_t test_vasubu_vx_u32m1_mu(vbool32_t mask, vuint32m1_t maskedoff, vuint32m1_t op1, uint32_t op2, size_t vl) { + // expected-error@+1 {{argument value 5 is outside the valid range [0, 3]}} + return __riscv_vasubu_vx_u32m1_mu(mask, maskedoff, op1, op2, 5, vl); +} diff --git a/llvm/include/llvm/IR/IntrinsicsRISCV.td b/llvm/include/llvm/IR/IntrinsicsRISCV.td --- a/llvm/include/llvm/IR/IntrinsicsRISCV.td +++ b/llvm/include/llvm/IR/IntrinsicsRISCV.td @@ -1361,18 +1361,10 @@ def "int_riscv_vcompress" : RISCVCompress; - defm vaaddu : RISCVSaturatingBinaryAAX; - defm vaadd : RISCVSaturatingBinaryAAX; - defm vasubu : RISCVSaturatingBinaryAAX; - defm vasub : RISCVSaturatingBinaryAAX; - - // Note: These intrinsics has an additional `.rm` to their name compared to - // their existing ones. These intrinsics has an additional operand that models - // the rounding mode and will replace the existing ones in the next commit. - defm vaaddu_rm : RISCVSaturatingBinaryAAXRoundingMode; - defm vaadd_rm : RISCVSaturatingBinaryAAXRoundingMode; - defm vasubu_rm : RISCVSaturatingBinaryAAXRoundingMode; - defm vasub_rm : RISCVSaturatingBinaryAAXRoundingMode; + defm vaaddu : RISCVSaturatingBinaryAAXRoundingMode; + defm vaadd : RISCVSaturatingBinaryAAXRoundingMode; + defm vasubu : RISCVSaturatingBinaryAAXRoundingMode; + defm vasub : RISCVSaturatingBinaryAAXRoundingMode; defm vsmul : RISCVSaturatingBinaryAAX; diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoVPseudos.td b/llvm/lib/Target/RISCV/RISCVInstrInfoVPseudos.td --- a/llvm/lib/Target/RISCV/RISCVInstrInfoVPseudos.td +++ b/llvm/lib/Target/RISCV/RISCVInstrInfoVPseudos.td @@ -4029,7 +4029,7 @@ int sew, VReg op1_reg_class, DAGOperand op2_kind> : - Pat<(result_type (!cast(intrinsic_name # "_rm") + Pat<(result_type (!cast(intrinsic_name) (result_type (undef)), (op1_type op1_reg_class:$rs1), (op2_type op2_kind:$rs2), @@ -4050,7 +4050,7 @@ VReg result_reg_class, VReg op1_reg_class, DAGOperand op2_kind> : - Pat<(result_type (!cast(intrinsic_name # "_rm") + Pat<(result_type (!cast(intrinsic_name) (result_type result_reg_class:$merge), (op1_type op1_reg_class:$rs1), (op2_type op2_kind:$rs2), @@ -4136,7 +4136,7 @@ VReg result_reg_class, VReg op1_reg_class, DAGOperand op2_kind> : - Pat<(result_type (!cast(intrinsic_name#"_rm_mask") + Pat<(result_type (!cast(intrinsic_name#"_mask") (result_type result_reg_class:$merge), (op1_type op1_reg_class:$rs1), (op2_type op2_kind:$rs2), diff --git a/llvm/test/CodeGen/RISCV/rvv/masked-tama.ll b/llvm/test/CodeGen/RISCV/rvv/masked-tama.ll --- a/llvm/test/CodeGen/RISCV/rvv/masked-tama.ll +++ b/llvm/test/CodeGen/RISCV/rvv/masked-tama.ll @@ -504,7 +504,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv1i8.nxv1i8( +declare @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8( , , , @@ -521,7 +521,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v9, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8( undef, %0, %1, diff --git a/llvm/test/CodeGen/RISCV/rvv/masked-tamu.ll b/llvm/test/CodeGen/RISCV/rvv/masked-tamu.ll --- a/llvm/test/CodeGen/RISCV/rvv/masked-tamu.ll +++ b/llvm/test/CodeGen/RISCV/rvv/masked-tamu.ll @@ -478,7 +478,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv1i8.nxv1i8( +declare @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8( , , , @@ -494,7 +494,7 @@ ; CHECK-NEXT: 
vaadd.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8( %0, %1, %2, diff --git a/llvm/test/CodeGen/RISCV/rvv/masked-tuma.ll b/llvm/test/CodeGen/RISCV/rvv/masked-tuma.ll --- a/llvm/test/CodeGen/RISCV/rvv/masked-tuma.ll +++ b/llvm/test/CodeGen/RISCV/rvv/masked-tuma.ll @@ -478,7 +478,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv1i8.nxv1i8( +declare @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8( , , , @@ -494,7 +494,7 @@ ; CHECK-NEXT: vaadd.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8( %0, %1, %2, diff --git a/llvm/test/CodeGen/RISCV/rvv/masked-tumu.ll b/llvm/test/CodeGen/RISCV/rvv/masked-tumu.ll --- a/llvm/test/CodeGen/RISCV/rvv/masked-tumu.ll +++ b/llvm/test/CodeGen/RISCV/rvv/masked-tumu.ll @@ -478,7 +478,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv1i8.nxv1i8( +declare @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8( , , , @@ -494,7 +494,7 @@ ; CHECK-NEXT: vaadd.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8( %0, %1, %2, diff --git a/llvm/test/CodeGen/RISCV/rvv/vaadd.ll b/llvm/test/CodeGen/RISCV/rvv/vaadd.ll --- a/llvm/test/CodeGen/RISCV/rvv/vaadd.ll +++ b/llvm/test/CodeGen/RISCV/rvv/vaadd.ll @@ -3,7 +3,7 @@ ; RUN: -verify-machineinstrs | FileCheck %s --check-prefixes=CHECK,RV32 ; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v \ ; RUN: -verify-machineinstrs | FileCheck %s --check-prefixes=CHECK,RV64 -declare @llvm.riscv.vaadd.rm.nxv1i8.nxv1i8( +declare @llvm.riscv.vaadd.nxv1i8.nxv1i8( , , , @@ -17,7 +17,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vaadd.nxv1i8.nxv1i8( undef, %0, %1, @@ -26,7 +26,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv1i8.nxv1i8( +declare @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8( , , , @@ -41,7 +41,7 @@ ; CHECK-NEXT: vaadd.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vaadd.mask.nxv1i8.nxv1i8( %0, %1, %2, @@ -51,7 +51,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv2i8.nxv2i8( +declare @llvm.riscv.vaadd.nxv2i8.nxv2i8( , , , @@ -65,7 +65,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv2i8.nxv2i8( + %a = call @llvm.riscv.vaadd.nxv2i8.nxv2i8( undef, %0, %1, @@ -74,7 +74,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv2i8.nxv2i8( +declare @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8( , , , @@ -89,7 +89,7 @@ ; CHECK-NEXT: vaadd.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv2i8.nxv2i8( + %a = call @llvm.riscv.vaadd.mask.nxv2i8.nxv2i8( %0, %1, %2, @@ -99,7 +99,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv4i8.nxv4i8( +declare @llvm.riscv.vaadd.nxv4i8.nxv4i8( , , , @@ -113,7 +113,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv4i8.nxv4i8( + %a = call @llvm.riscv.vaadd.nxv4i8.nxv4i8( undef, %0, %1, @@ -122,7 +122,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv4i8.nxv4i8( +declare @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8( , , , @@ -137,7 +137,7 @@ ; CHECK-NEXT: vaadd.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv4i8.nxv4i8( + %a = call @llvm.riscv.vaadd.mask.nxv4i8.nxv4i8( %0, %1, %2, @@ -147,7 +147,7 @@ ret %a } -declare 
@llvm.riscv.vaadd.rm.nxv8i8.nxv8i8( +declare @llvm.riscv.vaadd.nxv8i8.nxv8i8( , , , @@ -161,7 +161,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv8i8.nxv8i8( + %a = call @llvm.riscv.vaadd.nxv8i8.nxv8i8( undef, %0, %1, @@ -170,7 +170,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv8i8.nxv8i8( +declare @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8( , , , @@ -185,7 +185,7 @@ ; CHECK-NEXT: vaadd.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv8i8.nxv8i8( + %a = call @llvm.riscv.vaadd.mask.nxv8i8.nxv8i8( %0, %1, %2, @@ -195,7 +195,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv16i8.nxv16i8( +declare @llvm.riscv.vaadd.nxv16i8.nxv16i8( , , , @@ -209,7 +209,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv16i8.nxv16i8( + %a = call @llvm.riscv.vaadd.nxv16i8.nxv16i8( undef, %0, %1, @@ -218,7 +218,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv16i8.nxv16i8( +declare @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8( , , , @@ -233,7 +233,7 @@ ; CHECK-NEXT: vaadd.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv16i8.nxv16i8( + %a = call @llvm.riscv.vaadd.mask.nxv16i8.nxv16i8( %0, %1, %2, @@ -243,7 +243,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv32i8.nxv32i8( +declare @llvm.riscv.vaadd.nxv32i8.nxv32i8( , , , @@ -257,7 +257,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv32i8.nxv32i8( + %a = call @llvm.riscv.vaadd.nxv32i8.nxv32i8( undef, %0, %1, @@ -266,7 +266,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv32i8.nxv32i8( +declare @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8( , , , @@ -281,7 +281,7 @@ ; CHECK-NEXT: vaadd.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv32i8.nxv32i8( + %a = call @llvm.riscv.vaadd.mask.nxv32i8.nxv32i8( %0, %1, %2, @@ -291,7 +291,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv64i8.nxv64i8( +declare @llvm.riscv.vaadd.nxv64i8.nxv64i8( , , , @@ -305,7 +305,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv64i8.nxv64i8( + %a = call @llvm.riscv.vaadd.nxv64i8.nxv64i8( undef, %0, %1, @@ -314,7 +314,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv64i8.nxv64i8( +declare @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8( , , , @@ -330,7 +330,7 @@ ; CHECK-NEXT: vaadd.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv64i8.nxv64i8( + %a = call @llvm.riscv.vaadd.mask.nxv64i8.nxv64i8( %0, %1, %2, @@ -340,7 +340,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv1i16.nxv1i16( +declare @llvm.riscv.vaadd.nxv1i16.nxv1i16( , , , @@ -354,7 +354,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv1i16.nxv1i16( + %a = call @llvm.riscv.vaadd.nxv1i16.nxv1i16( undef, %0, %1, @@ -363,7 +363,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv1i16.nxv1i16( +declare @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16( , , , @@ -378,7 +378,7 @@ ; CHECK-NEXT: vaadd.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv1i16.nxv1i16( + %a = call @llvm.riscv.vaadd.mask.nxv1i16.nxv1i16( %0, %1, %2, @@ -388,7 +388,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv2i16.nxv2i16( +declare @llvm.riscv.vaadd.nxv2i16.nxv2i16( , , , @@ -402,7 +402,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv2i16.nxv2i16( + %a = call @llvm.riscv.vaadd.nxv2i16.nxv2i16( undef, %0, 
%1, @@ -411,7 +411,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv2i16.nxv2i16( +declare @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16( , , , @@ -426,7 +426,7 @@ ; CHECK-NEXT: vaadd.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv2i16.nxv2i16( + %a = call @llvm.riscv.vaadd.mask.nxv2i16.nxv2i16( %0, %1, %2, @@ -436,7 +436,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv4i16.nxv4i16( +declare @llvm.riscv.vaadd.nxv4i16.nxv4i16( , , , @@ -450,7 +450,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv4i16.nxv4i16( + %a = call @llvm.riscv.vaadd.nxv4i16.nxv4i16( undef, %0, %1, @@ -459,7 +459,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv4i16.nxv4i16( +declare @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16( , , , @@ -474,7 +474,7 @@ ; CHECK-NEXT: vaadd.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv4i16.nxv4i16( + %a = call @llvm.riscv.vaadd.mask.nxv4i16.nxv4i16( %0, %1, %2, @@ -484,7 +484,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv8i16.nxv8i16( +declare @llvm.riscv.vaadd.nxv8i16.nxv8i16( , , , @@ -498,7 +498,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv8i16.nxv8i16( + %a = call @llvm.riscv.vaadd.nxv8i16.nxv8i16( undef, %0, %1, @@ -507,7 +507,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv8i16.nxv8i16( +declare @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16( , , , @@ -522,7 +522,7 @@ ; CHECK-NEXT: vaadd.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv8i16.nxv8i16( + %a = call @llvm.riscv.vaadd.mask.nxv8i16.nxv8i16( %0, %1, %2, @@ -532,7 +532,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv16i16.nxv16i16( +declare @llvm.riscv.vaadd.nxv16i16.nxv16i16( , , , @@ -546,7 +546,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv16i16.nxv16i16( + %a = call @llvm.riscv.vaadd.nxv16i16.nxv16i16( undef, %0, %1, @@ -555,7 +555,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv16i16.nxv16i16( +declare @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16( , , , @@ -570,7 +570,7 @@ ; CHECK-NEXT: vaadd.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv16i16.nxv16i16( + %a = call @llvm.riscv.vaadd.mask.nxv16i16.nxv16i16( %0, %1, %2, @@ -580,7 +580,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv32i16.nxv32i16( +declare @llvm.riscv.vaadd.nxv32i16.nxv32i16( , , , @@ -594,7 +594,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv32i16.nxv32i16( + %a = call @llvm.riscv.vaadd.nxv32i16.nxv32i16( undef, %0, %1, @@ -603,7 +603,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv32i16.nxv32i16( +declare @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16( , , , @@ -619,7 +619,7 @@ ; CHECK-NEXT: vaadd.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv32i16.nxv32i16( + %a = call @llvm.riscv.vaadd.mask.nxv32i16.nxv32i16( %0, %1, %2, @@ -629,7 +629,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv1i32.nxv1i32( +declare @llvm.riscv.vaadd.nxv1i32.nxv1i32( , , , @@ -643,7 +643,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv1i32.nxv1i32( + %a = call @llvm.riscv.vaadd.nxv1i32.nxv1i32( undef, %0, %1, @@ -652,7 +652,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv1i32.nxv1i32( +declare @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32( , , , @@ -667,7 +667,7 @@ ; CHECK-NEXT: vaadd.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: 
- %a = call @llvm.riscv.vaadd.rm.mask.nxv1i32.nxv1i32( + %a = call @llvm.riscv.vaadd.mask.nxv1i32.nxv1i32( %0, %1, %2, @@ -677,7 +677,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv2i32.nxv2i32( +declare @llvm.riscv.vaadd.nxv2i32.nxv2i32( , , , @@ -691,7 +691,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv2i32.nxv2i32( + %a = call @llvm.riscv.vaadd.nxv2i32.nxv2i32( undef, %0, %1, @@ -700,7 +700,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv2i32.nxv2i32( +declare @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32( , , , @@ -715,7 +715,7 @@ ; CHECK-NEXT: vaadd.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv2i32.nxv2i32( + %a = call @llvm.riscv.vaadd.mask.nxv2i32.nxv2i32( %0, %1, %2, @@ -725,7 +725,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv4i32.nxv4i32( +declare @llvm.riscv.vaadd.nxv4i32.nxv4i32( , , , @@ -739,7 +739,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv4i32.nxv4i32( + %a = call @llvm.riscv.vaadd.nxv4i32.nxv4i32( undef, %0, %1, @@ -748,7 +748,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv4i32.nxv4i32( +declare @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32( , , , @@ -763,7 +763,7 @@ ; CHECK-NEXT: vaadd.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv4i32.nxv4i32( + %a = call @llvm.riscv.vaadd.mask.nxv4i32.nxv4i32( %0, %1, %2, @@ -773,7 +773,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv8i32.nxv8i32( +declare @llvm.riscv.vaadd.nxv8i32.nxv8i32( , , , @@ -787,7 +787,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv8i32.nxv8i32( + %a = call @llvm.riscv.vaadd.nxv8i32.nxv8i32( undef, %0, %1, @@ -796,7 +796,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv8i32.nxv8i32( +declare @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32( , , , @@ -811,7 +811,7 @@ ; CHECK-NEXT: vaadd.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv8i32.nxv8i32( + %a = call @llvm.riscv.vaadd.mask.nxv8i32.nxv8i32( %0, %1, %2, @@ -821,7 +821,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv16i32.nxv16i32( +declare @llvm.riscv.vaadd.nxv16i32.nxv16i32( , , , @@ -835,7 +835,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv16i32.nxv16i32( + %a = call @llvm.riscv.vaadd.nxv16i32.nxv16i32( undef, %0, %1, @@ -844,7 +844,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv16i32.nxv16i32( +declare @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32( , , , @@ -860,7 +860,7 @@ ; CHECK-NEXT: vaadd.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv16i32.nxv16i32( + %a = call @llvm.riscv.vaadd.mask.nxv16i32.nxv16i32( %0, %1, %2, @@ -870,7 +870,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv1i64.nxv1i64( +declare @llvm.riscv.vaadd.nxv1i64.nxv1i64( , , , @@ -884,7 +884,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv1i64.nxv1i64( + %a = call @llvm.riscv.vaadd.nxv1i64.nxv1i64( undef, %0, %1, @@ -893,7 +893,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv1i64.nxv1i64( +declare @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64( , , , @@ -908,7 +908,7 @@ ; CHECK-NEXT: vaadd.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv1i64.nxv1i64( + %a = call @llvm.riscv.vaadd.mask.nxv1i64.nxv1i64( %0, %1, %2, @@ -918,7 +918,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv2i64.nxv2i64( +declare @llvm.riscv.vaadd.nxv2i64.nxv2i64( , , 
, @@ -932,7 +932,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv2i64.nxv2i64( + %a = call @llvm.riscv.vaadd.nxv2i64.nxv2i64( undef, %0, %1, @@ -941,7 +941,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv2i64.nxv2i64( +declare @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64( , , , @@ -956,7 +956,7 @@ ; CHECK-NEXT: vaadd.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv2i64.nxv2i64( + %a = call @llvm.riscv.vaadd.mask.nxv2i64.nxv2i64( %0, %1, %2, @@ -966,7 +966,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv4i64.nxv4i64( +declare @llvm.riscv.vaadd.nxv4i64.nxv4i64( , , , @@ -980,7 +980,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv4i64.nxv4i64( + %a = call @llvm.riscv.vaadd.nxv4i64.nxv4i64( undef, %0, %1, @@ -989,7 +989,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv4i64.nxv4i64( +declare @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64( , , , @@ -1004,7 +1004,7 @@ ; CHECK-NEXT: vaadd.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv4i64.nxv4i64( + %a = call @llvm.riscv.vaadd.mask.nxv4i64.nxv4i64( %0, %1, %2, @@ -1014,7 +1014,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv8i64.nxv8i64( +declare @llvm.riscv.vaadd.nxv8i64.nxv8i64( , , , @@ -1028,7 +1028,7 @@ ; CHECK-NEXT: vaadd.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv8i64.nxv8i64( + %a = call @llvm.riscv.vaadd.nxv8i64.nxv8i64( undef, %0, %1, @@ -1037,7 +1037,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv8i64.nxv8i64( +declare @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64( , , , @@ -1053,7 +1053,7 @@ ; CHECK-NEXT: vaadd.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv8i64.nxv8i64( + %a = call @llvm.riscv.vaadd.mask.nxv8i64.nxv8i64( %0, %1, %2, @@ -1063,7 +1063,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv1i8.i8( +declare @llvm.riscv.vaadd.nxv1i8.i8( , , i8, @@ -1077,7 +1077,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv1i8.i8( + %a = call @llvm.riscv.vaadd.nxv1i8.i8( undef, %0, i8 %1, @@ -1086,7 +1086,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv1i8.i8( +declare @llvm.riscv.vaadd.mask.nxv1i8.i8( , , i8, @@ -1101,7 +1101,7 @@ ; CHECK-NEXT: vaadd.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv1i8.i8( + %a = call @llvm.riscv.vaadd.mask.nxv1i8.i8( %0, %1, i8 %2, @@ -1111,7 +1111,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv2i8.i8( +declare @llvm.riscv.vaadd.nxv2i8.i8( , , i8, @@ -1125,7 +1125,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv2i8.i8( + %a = call @llvm.riscv.vaadd.nxv2i8.i8( undef, %0, i8 %1, @@ -1134,7 +1134,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv2i8.i8( +declare @llvm.riscv.vaadd.mask.nxv2i8.i8( , , i8, @@ -1149,7 +1149,7 @@ ; CHECK-NEXT: vaadd.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv2i8.i8( + %a = call @llvm.riscv.vaadd.mask.nxv2i8.i8( %0, %1, i8 %2, @@ -1159,7 +1159,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv4i8.i8( +declare @llvm.riscv.vaadd.nxv4i8.i8( , , i8, @@ -1173,7 +1173,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv4i8.i8( + %a = call @llvm.riscv.vaadd.nxv4i8.i8( undef, %0, i8 %1, @@ -1182,7 +1182,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv4i8.i8( +declare @llvm.riscv.vaadd.mask.nxv4i8.i8( , , i8, @@ 
-1197,7 +1197,7 @@ ; CHECK-NEXT: vaadd.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv4i8.i8( + %a = call @llvm.riscv.vaadd.mask.nxv4i8.i8( %0, %1, i8 %2, @@ -1207,7 +1207,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv8i8.i8( +declare @llvm.riscv.vaadd.nxv8i8.i8( , , i8, @@ -1221,7 +1221,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv8i8.i8( + %a = call @llvm.riscv.vaadd.nxv8i8.i8( undef, %0, i8 %1, @@ -1230,7 +1230,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv8i8.i8( +declare @llvm.riscv.vaadd.mask.nxv8i8.i8( , , i8, @@ -1245,7 +1245,7 @@ ; CHECK-NEXT: vaadd.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv8i8.i8( + %a = call @llvm.riscv.vaadd.mask.nxv8i8.i8( %0, %1, i8 %2, @@ -1255,7 +1255,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv16i8.i8( +declare @llvm.riscv.vaadd.nxv16i8.i8( , , i8, @@ -1269,7 +1269,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv16i8.i8( + %a = call @llvm.riscv.vaadd.nxv16i8.i8( undef, %0, i8 %1, @@ -1278,7 +1278,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv16i8.i8( +declare @llvm.riscv.vaadd.mask.nxv16i8.i8( , , i8, @@ -1293,7 +1293,7 @@ ; CHECK-NEXT: vaadd.vx v8, v10, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv16i8.i8( + %a = call @llvm.riscv.vaadd.mask.nxv16i8.i8( %0, %1, i8 %2, @@ -1303,7 +1303,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv32i8.i8( +declare @llvm.riscv.vaadd.nxv32i8.i8( , , i8, @@ -1317,7 +1317,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv32i8.i8( + %a = call @llvm.riscv.vaadd.nxv32i8.i8( undef, %0, i8 %1, @@ -1326,7 +1326,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv32i8.i8( +declare @llvm.riscv.vaadd.mask.nxv32i8.i8( , , i8, @@ -1341,7 +1341,7 @@ ; CHECK-NEXT: vaadd.vx v8, v12, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv32i8.i8( + %a = call @llvm.riscv.vaadd.mask.nxv32i8.i8( %0, %1, i8 %2, @@ -1351,7 +1351,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv64i8.i8( +declare @llvm.riscv.vaadd.nxv64i8.i8( , , i8, @@ -1365,7 +1365,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv64i8.i8( + %a = call @llvm.riscv.vaadd.nxv64i8.i8( undef, %0, i8 %1, @@ -1374,7 +1374,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv64i8.i8( +declare @llvm.riscv.vaadd.mask.nxv64i8.i8( , , i8, @@ -1389,7 +1389,7 @@ ; CHECK-NEXT: vaadd.vx v8, v16, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv64i8.i8( + %a = call @llvm.riscv.vaadd.mask.nxv64i8.i8( %0, %1, i8 %2, @@ -1399,7 +1399,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv1i16.i16( +declare @llvm.riscv.vaadd.nxv1i16.i16( , , i16, @@ -1413,7 +1413,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv1i16.i16( + %a = call @llvm.riscv.vaadd.nxv1i16.i16( undef, %0, i16 %1, @@ -1422,7 +1422,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv1i16.i16( +declare @llvm.riscv.vaadd.mask.nxv1i16.i16( , , i16, @@ -1437,7 +1437,7 @@ ; CHECK-NEXT: vaadd.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv1i16.i16( + %a = call @llvm.riscv.vaadd.mask.nxv1i16.i16( %0, %1, i16 %2, @@ -1447,7 +1447,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv2i16.i16( +declare @llvm.riscv.vaadd.nxv2i16.i16( , , i16, @@ -1461,7 +1461,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 
; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv2i16.i16( + %a = call @llvm.riscv.vaadd.nxv2i16.i16( undef, %0, i16 %1, @@ -1470,7 +1470,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv2i16.i16( +declare @llvm.riscv.vaadd.mask.nxv2i16.i16( , , i16, @@ -1485,7 +1485,7 @@ ; CHECK-NEXT: vaadd.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv2i16.i16( + %a = call @llvm.riscv.vaadd.mask.nxv2i16.i16( %0, %1, i16 %2, @@ -1495,7 +1495,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv4i16.i16( +declare @llvm.riscv.vaadd.nxv4i16.i16( , , i16, @@ -1509,7 +1509,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv4i16.i16( + %a = call @llvm.riscv.vaadd.nxv4i16.i16( undef, %0, i16 %1, @@ -1518,7 +1518,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv4i16.i16( +declare @llvm.riscv.vaadd.mask.nxv4i16.i16( , , i16, @@ -1533,7 +1533,7 @@ ; CHECK-NEXT: vaadd.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv4i16.i16( + %a = call @llvm.riscv.vaadd.mask.nxv4i16.i16( %0, %1, i16 %2, @@ -1543,7 +1543,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv8i16.i16( +declare @llvm.riscv.vaadd.nxv8i16.i16( , , i16, @@ -1557,7 +1557,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv8i16.i16( + %a = call @llvm.riscv.vaadd.nxv8i16.i16( undef, %0, i16 %1, @@ -1566,7 +1566,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv8i16.i16( +declare @llvm.riscv.vaadd.mask.nxv8i16.i16( , , i16, @@ -1581,7 +1581,7 @@ ; CHECK-NEXT: vaadd.vx v8, v10, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv8i16.i16( + %a = call @llvm.riscv.vaadd.mask.nxv8i16.i16( %0, %1, i16 %2, @@ -1591,7 +1591,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv16i16.i16( +declare @llvm.riscv.vaadd.nxv16i16.i16( , , i16, @@ -1605,7 +1605,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv16i16.i16( + %a = call @llvm.riscv.vaadd.nxv16i16.i16( undef, %0, i16 %1, @@ -1614,7 +1614,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv16i16.i16( +declare @llvm.riscv.vaadd.mask.nxv16i16.i16( , , i16, @@ -1629,7 +1629,7 @@ ; CHECK-NEXT: vaadd.vx v8, v12, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv16i16.i16( + %a = call @llvm.riscv.vaadd.mask.nxv16i16.i16( %0, %1, i16 %2, @@ -1639,7 +1639,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv32i16.i16( +declare @llvm.riscv.vaadd.nxv32i16.i16( , , i16, @@ -1653,7 +1653,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv32i16.i16( + %a = call @llvm.riscv.vaadd.nxv32i16.i16( undef, %0, i16 %1, @@ -1662,7 +1662,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv32i16.i16( +declare @llvm.riscv.vaadd.mask.nxv32i16.i16( , , i16, @@ -1677,7 +1677,7 @@ ; CHECK-NEXT: vaadd.vx v8, v16, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv32i16.i16( + %a = call @llvm.riscv.vaadd.mask.nxv32i16.i16( %0, %1, i16 %2, @@ -1687,7 +1687,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv1i32.i32( +declare @llvm.riscv.vaadd.nxv1i32.i32( , , i32, @@ -1701,7 +1701,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv1i32.i32( + %a = call @llvm.riscv.vaadd.nxv1i32.i32( undef, %0, i32 %1, @@ -1710,7 +1710,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv1i32.i32( +declare @llvm.riscv.vaadd.mask.nxv1i32.i32( , , i32, @@ -1725,7 +1725,7 @@ ; 
CHECK-NEXT: vaadd.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv1i32.i32( + %a = call @llvm.riscv.vaadd.mask.nxv1i32.i32( %0, %1, i32 %2, @@ -1735,7 +1735,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv2i32.i32( +declare @llvm.riscv.vaadd.nxv2i32.i32( , , i32, @@ -1749,7 +1749,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv2i32.i32( + %a = call @llvm.riscv.vaadd.nxv2i32.i32( undef, %0, i32 %1, @@ -1758,7 +1758,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv2i32.i32( +declare @llvm.riscv.vaadd.mask.nxv2i32.i32( , , i32, @@ -1773,7 +1773,7 @@ ; CHECK-NEXT: vaadd.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv2i32.i32( + %a = call @llvm.riscv.vaadd.mask.nxv2i32.i32( %0, %1, i32 %2, @@ -1783,7 +1783,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv4i32.i32( +declare @llvm.riscv.vaadd.nxv4i32.i32( , , i32, @@ -1797,7 +1797,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv4i32.i32( + %a = call @llvm.riscv.vaadd.nxv4i32.i32( undef, %0, i32 %1, @@ -1806,7 +1806,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv4i32.i32( +declare @llvm.riscv.vaadd.mask.nxv4i32.i32( , , i32, @@ -1821,7 +1821,7 @@ ; CHECK-NEXT: vaadd.vx v8, v10, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv4i32.i32( + %a = call @llvm.riscv.vaadd.mask.nxv4i32.i32( %0, %1, i32 %2, @@ -1831,7 +1831,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv8i32.i32( +declare @llvm.riscv.vaadd.nxv8i32.i32( , , i32, @@ -1845,7 +1845,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv8i32.i32( + %a = call @llvm.riscv.vaadd.nxv8i32.i32( undef, %0, i32 %1, @@ -1854,7 +1854,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv8i32.i32( +declare @llvm.riscv.vaadd.mask.nxv8i32.i32( , , i32, @@ -1869,7 +1869,7 @@ ; CHECK-NEXT: vaadd.vx v8, v12, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv8i32.i32( + %a = call @llvm.riscv.vaadd.mask.nxv8i32.i32( %0, %1, i32 %2, @@ -1879,7 +1879,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv16i32.i32( +declare @llvm.riscv.vaadd.nxv16i32.i32( , , i32, @@ -1893,7 +1893,7 @@ ; CHECK-NEXT: vaadd.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv16i32.i32( + %a = call @llvm.riscv.vaadd.nxv16i32.i32( undef, %0, i32 %1, @@ -1902,7 +1902,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv16i32.i32( +declare @llvm.riscv.vaadd.mask.nxv16i32.i32( , , i32, @@ -1917,7 +1917,7 @@ ; CHECK-NEXT: vaadd.vx v8, v16, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv16i32.i32( + %a = call @llvm.riscv.vaadd.mask.nxv16i32.i32( %0, %1, i32 %2, @@ -1927,7 +1927,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv1i64.i64( +declare @llvm.riscv.vaadd.nxv1i64.i64( , , i64, @@ -1954,7 +1954,7 @@ ; RV64-NEXT: vaadd.vx v8, v8, a0 ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv1i64.i64( + %a = call @llvm.riscv.vaadd.nxv1i64.i64( undef, %0, i64 %1, @@ -1963,7 +1963,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv1i64.i64( +declare @llvm.riscv.vaadd.mask.nxv1i64.i64( , , i64, @@ -1991,7 +1991,7 @@ ; RV64-NEXT: vaadd.vx v8, v9, a0, v0.t ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv1i64.i64( + %a = call @llvm.riscv.vaadd.mask.nxv1i64.i64( %0, %1, i64 %2, @@ -2001,7 +2001,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv2i64.i64( +declare @llvm.riscv.vaadd.nxv2i64.i64( , , i64, @@ -2028,7 
+2028,7 @@ ; RV64-NEXT: vaadd.vx v8, v8, a0 ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv2i64.i64( + %a = call @llvm.riscv.vaadd.nxv2i64.i64( undef, %0, i64 %1, @@ -2037,7 +2037,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv2i64.i64( +declare @llvm.riscv.vaadd.mask.nxv2i64.i64( , , i64, @@ -2065,7 +2065,7 @@ ; RV64-NEXT: vaadd.vx v8, v10, a0, v0.t ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv2i64.i64( + %a = call @llvm.riscv.vaadd.mask.nxv2i64.i64( %0, %1, i64 %2, @@ -2075,7 +2075,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv4i64.i64( +declare @llvm.riscv.vaadd.nxv4i64.i64( , , i64, @@ -2102,7 +2102,7 @@ ; RV64-NEXT: vaadd.vx v8, v8, a0 ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv4i64.i64( + %a = call @llvm.riscv.vaadd.nxv4i64.i64( undef, %0, i64 %1, @@ -2111,7 +2111,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv4i64.i64( +declare @llvm.riscv.vaadd.mask.nxv4i64.i64( , , i64, @@ -2139,7 +2139,7 @@ ; RV64-NEXT: vaadd.vx v8, v12, a0, v0.t ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv4i64.i64( + %a = call @llvm.riscv.vaadd.mask.nxv4i64.i64( %0, %1, i64 %2, @@ -2149,7 +2149,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.nxv8i64.i64( +declare @llvm.riscv.vaadd.nxv8i64.i64( , , i64, @@ -2176,7 +2176,7 @@ ; RV64-NEXT: vaadd.vx v8, v8, a0 ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.nxv8i64.i64( + %a = call @llvm.riscv.vaadd.nxv8i64.i64( undef, %0, i64 %1, @@ -2185,7 +2185,7 @@ ret %a } -declare @llvm.riscv.vaadd.rm.mask.nxv8i64.i64( +declare @llvm.riscv.vaadd.mask.nxv8i64.i64( , , i64, @@ -2213,7 +2213,7 @@ ; RV64-NEXT: vaadd.vx v8, v16, a0, v0.t ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaadd.rm.mask.nxv8i64.i64( + %a = call @llvm.riscv.vaadd.mask.nxv8i64.i64( %0, %1, i64 %2, diff --git a/llvm/test/CodeGen/RISCV/rvv/vaaddu.ll b/llvm/test/CodeGen/RISCV/rvv/vaaddu.ll --- a/llvm/test/CodeGen/RISCV/rvv/vaaddu.ll +++ b/llvm/test/CodeGen/RISCV/rvv/vaaddu.ll @@ -3,7 +3,7 @@ ; RUN: -verify-machineinstrs | FileCheck %s --check-prefixes=CHECK,RV32 ; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v \ ; RUN: -verify-machineinstrs | FileCheck %s --check-prefixes=CHECK,RV64 -declare @llvm.riscv.vaaddu.rm.nxv1i8.nxv1i8( +declare @llvm.riscv.vaaddu.nxv1i8.nxv1i8( , , , @@ -17,7 +17,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vaaddu.nxv1i8.nxv1i8( undef, %0, %1, @@ -26,7 +26,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv1i8.nxv1i8( +declare @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8( , , , @@ -41,7 +41,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vaaddu.mask.nxv1i8.nxv1i8( %0, %1, %2, @@ -51,7 +51,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv2i8.nxv2i8( +declare @llvm.riscv.vaaddu.nxv2i8.nxv2i8( , , , @@ -65,7 +65,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv2i8.nxv2i8( + %a = call @llvm.riscv.vaaddu.nxv2i8.nxv2i8( undef, %0, %1, @@ -74,7 +74,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv2i8.nxv2i8( +declare @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8( , , , @@ -89,7 +89,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv2i8.nxv2i8( + %a = call @llvm.riscv.vaaddu.mask.nxv2i8.nxv2i8( %0, %1, %2, @@ -99,7 +99,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv4i8.nxv4i8( +declare 
@llvm.riscv.vaaddu.nxv4i8.nxv4i8( , , , @@ -113,7 +113,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv4i8.nxv4i8( + %a = call @llvm.riscv.vaaddu.nxv4i8.nxv4i8( undef, %0, %1, @@ -122,7 +122,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv4i8.nxv4i8( +declare @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8( , , , @@ -137,7 +137,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv4i8.nxv4i8( + %a = call @llvm.riscv.vaaddu.mask.nxv4i8.nxv4i8( %0, %1, %2, @@ -147,7 +147,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv8i8.nxv8i8( +declare @llvm.riscv.vaaddu.nxv8i8.nxv8i8( , , , @@ -161,7 +161,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv8i8.nxv8i8( + %a = call @llvm.riscv.vaaddu.nxv8i8.nxv8i8( undef, %0, %1, @@ -170,7 +170,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv8i8.nxv8i8( +declare @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8( , , , @@ -185,7 +185,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv8i8.nxv8i8( + %a = call @llvm.riscv.vaaddu.mask.nxv8i8.nxv8i8( %0, %1, %2, @@ -195,7 +195,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv16i8.nxv16i8( +declare @llvm.riscv.vaaddu.nxv16i8.nxv16i8( , , , @@ -209,7 +209,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv16i8.nxv16i8( + %a = call @llvm.riscv.vaaddu.nxv16i8.nxv16i8( undef, %0, %1, @@ -218,7 +218,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv16i8.nxv16i8( +declare @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8( , , , @@ -233,7 +233,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv16i8.nxv16i8( + %a = call @llvm.riscv.vaaddu.mask.nxv16i8.nxv16i8( %0, %1, %2, @@ -243,7 +243,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv32i8.nxv32i8( +declare @llvm.riscv.vaaddu.nxv32i8.nxv32i8( , , , @@ -257,7 +257,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv32i8.nxv32i8( + %a = call @llvm.riscv.vaaddu.nxv32i8.nxv32i8( undef, %0, %1, @@ -266,7 +266,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv32i8.nxv32i8( +declare @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8( , , , @@ -281,7 +281,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv32i8.nxv32i8( + %a = call @llvm.riscv.vaaddu.mask.nxv32i8.nxv32i8( %0, %1, %2, @@ -291,7 +291,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv64i8.nxv64i8( +declare @llvm.riscv.vaaddu.nxv64i8.nxv64i8( , , , @@ -305,7 +305,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv64i8.nxv64i8( + %a = call @llvm.riscv.vaaddu.nxv64i8.nxv64i8( undef, %0, %1, @@ -314,7 +314,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv64i8.nxv64i8( +declare @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8( , , , @@ -330,7 +330,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv64i8.nxv64i8( + %a = call @llvm.riscv.vaaddu.mask.nxv64i8.nxv64i8( %0, %1, %2, @@ -340,7 +340,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv1i16.nxv1i16( +declare @llvm.riscv.vaaddu.nxv1i16.nxv1i16( , , , @@ -354,7 +354,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv1i16.nxv1i16( + %a = call @llvm.riscv.vaaddu.nxv1i16.nxv1i16( undef, %0, %1, @@ 
-363,7 +363,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv1i16.nxv1i16( +declare @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16( , , , @@ -378,7 +378,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv1i16.nxv1i16( + %a = call @llvm.riscv.vaaddu.mask.nxv1i16.nxv1i16( %0, %1, %2, @@ -388,7 +388,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv2i16.nxv2i16( +declare @llvm.riscv.vaaddu.nxv2i16.nxv2i16( , , , @@ -402,7 +402,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv2i16.nxv2i16( + %a = call @llvm.riscv.vaaddu.nxv2i16.nxv2i16( undef, %0, %1, @@ -411,7 +411,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv2i16.nxv2i16( +declare @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16( , , , @@ -426,7 +426,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv2i16.nxv2i16( + %a = call @llvm.riscv.vaaddu.mask.nxv2i16.nxv2i16( %0, %1, %2, @@ -436,7 +436,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv4i16.nxv4i16( +declare @llvm.riscv.vaaddu.nxv4i16.nxv4i16( , , , @@ -450,7 +450,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv4i16.nxv4i16( + %a = call @llvm.riscv.vaaddu.nxv4i16.nxv4i16( undef, %0, %1, @@ -459,7 +459,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv4i16.nxv4i16( +declare @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16( , , , @@ -474,7 +474,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv4i16.nxv4i16( + %a = call @llvm.riscv.vaaddu.mask.nxv4i16.nxv4i16( %0, %1, %2, @@ -484,7 +484,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv8i16.nxv8i16( +declare @llvm.riscv.vaaddu.nxv8i16.nxv8i16( , , , @@ -498,7 +498,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv8i16.nxv8i16( + %a = call @llvm.riscv.vaaddu.nxv8i16.nxv8i16( undef, %0, %1, @@ -507,7 +507,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv8i16.nxv8i16( +declare @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16( , , , @@ -522,7 +522,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv8i16.nxv8i16( + %a = call @llvm.riscv.vaaddu.mask.nxv8i16.nxv8i16( %0, %1, %2, @@ -532,7 +532,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv16i16.nxv16i16( +declare @llvm.riscv.vaaddu.nxv16i16.nxv16i16( , , , @@ -546,7 +546,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv16i16.nxv16i16( + %a = call @llvm.riscv.vaaddu.nxv16i16.nxv16i16( undef, %0, %1, @@ -555,7 +555,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv16i16.nxv16i16( +declare @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16( , , , @@ -570,7 +570,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv16i16.nxv16i16( + %a = call @llvm.riscv.vaaddu.mask.nxv16i16.nxv16i16( %0, %1, %2, @@ -580,7 +580,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv32i16.nxv32i16( +declare @llvm.riscv.vaaddu.nxv32i16.nxv32i16( , , , @@ -594,7 +594,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv32i16.nxv32i16( + %a = call @llvm.riscv.vaaddu.nxv32i16.nxv32i16( undef, %0, %1, @@ -603,7 +603,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv32i16.nxv32i16( +declare @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16( , , , @@ -619,7 +619,7 @@ ; CHECK-NEXT: vaaddu.vv 
v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv32i16.nxv32i16( + %a = call @llvm.riscv.vaaddu.mask.nxv32i16.nxv32i16( %0, %1, %2, @@ -629,7 +629,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv1i32.nxv1i32( +declare @llvm.riscv.vaaddu.nxv1i32.nxv1i32( , , , @@ -643,7 +643,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv1i32.nxv1i32( + %a = call @llvm.riscv.vaaddu.nxv1i32.nxv1i32( undef, %0, %1, @@ -652,7 +652,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv1i32.nxv1i32( +declare @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32( , , , @@ -667,7 +667,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv1i32.nxv1i32( + %a = call @llvm.riscv.vaaddu.mask.nxv1i32.nxv1i32( %0, %1, %2, @@ -677,7 +677,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv2i32.nxv2i32( +declare @llvm.riscv.vaaddu.nxv2i32.nxv2i32( , , , @@ -691,7 +691,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv2i32.nxv2i32( + %a = call @llvm.riscv.vaaddu.nxv2i32.nxv2i32( undef, %0, %1, @@ -700,7 +700,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv2i32.nxv2i32( +declare @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32( , , , @@ -715,7 +715,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv2i32.nxv2i32( + %a = call @llvm.riscv.vaaddu.mask.nxv2i32.nxv2i32( %0, %1, %2, @@ -725,7 +725,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv4i32.nxv4i32( +declare @llvm.riscv.vaaddu.nxv4i32.nxv4i32( , , , @@ -739,7 +739,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv4i32.nxv4i32( + %a = call @llvm.riscv.vaaddu.nxv4i32.nxv4i32( undef, %0, %1, @@ -748,7 +748,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv4i32.nxv4i32( +declare @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32( , , , @@ -763,7 +763,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv4i32.nxv4i32( + %a = call @llvm.riscv.vaaddu.mask.nxv4i32.nxv4i32( %0, %1, %2, @@ -773,7 +773,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv8i32.nxv8i32( +declare @llvm.riscv.vaaddu.nxv8i32.nxv8i32( , , , @@ -787,7 +787,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv8i32.nxv8i32( + %a = call @llvm.riscv.vaaddu.nxv8i32.nxv8i32( undef, %0, %1, @@ -796,7 +796,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv8i32.nxv8i32( +declare @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32( , , , @@ -811,7 +811,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv8i32.nxv8i32( + %a = call @llvm.riscv.vaaddu.mask.nxv8i32.nxv8i32( %0, %1, %2, @@ -821,7 +821,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv16i32.nxv16i32( +declare @llvm.riscv.vaaddu.nxv16i32.nxv16i32( , , , @@ -835,7 +835,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv16i32.nxv16i32( + %a = call @llvm.riscv.vaaddu.nxv16i32.nxv16i32( undef, %0, %1, @@ -844,7 +844,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv16i32.nxv16i32( +declare @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32( , , , @@ -860,7 +860,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv16i32.nxv16i32( + %a = call @llvm.riscv.vaaddu.mask.nxv16i32.nxv16i32( %0, %1, %2, @@ -870,7 +870,7 @@ ret 
%a } -declare @llvm.riscv.vaaddu.rm.nxv1i64.nxv1i64( +declare @llvm.riscv.vaaddu.nxv1i64.nxv1i64( , , , @@ -884,7 +884,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv1i64.nxv1i64( + %a = call @llvm.riscv.vaaddu.nxv1i64.nxv1i64( undef, %0, %1, @@ -893,7 +893,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv1i64.nxv1i64( +declare @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64( , , , @@ -908,7 +908,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv1i64.nxv1i64( + %a = call @llvm.riscv.vaaddu.mask.nxv1i64.nxv1i64( %0, %1, %2, @@ -918,7 +918,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv2i64.nxv2i64( +declare @llvm.riscv.vaaddu.nxv2i64.nxv2i64( , , , @@ -932,7 +932,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv2i64.nxv2i64( + %a = call @llvm.riscv.vaaddu.nxv2i64.nxv2i64( undef, %0, %1, @@ -941,7 +941,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv2i64.nxv2i64( +declare @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64( , , , @@ -956,7 +956,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv2i64.nxv2i64( + %a = call @llvm.riscv.vaaddu.mask.nxv2i64.nxv2i64( %0, %1, %2, @@ -966,7 +966,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv4i64.nxv4i64( +declare @llvm.riscv.vaaddu.nxv4i64.nxv4i64( , , , @@ -980,7 +980,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv4i64.nxv4i64( + %a = call @llvm.riscv.vaaddu.nxv4i64.nxv4i64( undef, %0, %1, @@ -989,7 +989,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv4i64.nxv4i64( +declare @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64( , , , @@ -1004,7 +1004,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv4i64.nxv4i64( + %a = call @llvm.riscv.vaaddu.mask.nxv4i64.nxv4i64( %0, %1, %2, @@ -1014,7 +1014,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv8i64.nxv8i64( +declare @llvm.riscv.vaaddu.nxv8i64.nxv8i64( , , , @@ -1028,7 +1028,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv8i64.nxv8i64( + %a = call @llvm.riscv.vaaddu.nxv8i64.nxv8i64( undef, %0, %1, @@ -1037,7 +1037,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv8i64.nxv8i64( +declare @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64( , , , @@ -1053,7 +1053,7 @@ ; CHECK-NEXT: vaaddu.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv8i64.nxv8i64( + %a = call @llvm.riscv.vaaddu.mask.nxv8i64.nxv8i64( %0, %1, %2, @@ -1063,7 +1063,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv1i8.i8( +declare @llvm.riscv.vaaddu.nxv1i8.i8( , , i8, @@ -1077,7 +1077,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv1i8.i8( + %a = call @llvm.riscv.vaaddu.nxv1i8.i8( undef, %0, i8 %1, @@ -1086,7 +1086,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv1i8.i8( +declare @llvm.riscv.vaaddu.mask.nxv1i8.i8( , , i8, @@ -1101,7 +1101,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv1i8.i8( + %a = call @llvm.riscv.vaaddu.mask.nxv1i8.i8( %0, %1, i8 %2, @@ -1111,7 +1111,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv2i8.i8( +declare @llvm.riscv.vaaddu.nxv2i8.i8( , , i8, @@ -1125,7 +1125,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv2i8.i8( + %a = 
call @llvm.riscv.vaaddu.nxv2i8.i8( undef, %0, i8 %1, @@ -1134,7 +1134,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv2i8.i8( +declare @llvm.riscv.vaaddu.mask.nxv2i8.i8( , , i8, @@ -1149,7 +1149,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv2i8.i8( + %a = call @llvm.riscv.vaaddu.mask.nxv2i8.i8( %0, %1, i8 %2, @@ -1159,7 +1159,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv4i8.i8( +declare @llvm.riscv.vaaddu.nxv4i8.i8( , , i8, @@ -1173,7 +1173,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv4i8.i8( + %a = call @llvm.riscv.vaaddu.nxv4i8.i8( undef, %0, i8 %1, @@ -1182,7 +1182,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv4i8.i8( +declare @llvm.riscv.vaaddu.mask.nxv4i8.i8( , , i8, @@ -1197,7 +1197,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv4i8.i8( + %a = call @llvm.riscv.vaaddu.mask.nxv4i8.i8( %0, %1, i8 %2, @@ -1207,7 +1207,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv8i8.i8( +declare @llvm.riscv.vaaddu.nxv8i8.i8( , , i8, @@ -1221,7 +1221,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv8i8.i8( + %a = call @llvm.riscv.vaaddu.nxv8i8.i8( undef, %0, i8 %1, @@ -1230,7 +1230,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv8i8.i8( +declare @llvm.riscv.vaaddu.mask.nxv8i8.i8( , , i8, @@ -1245,7 +1245,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv8i8.i8( + %a = call @llvm.riscv.vaaddu.mask.nxv8i8.i8( %0, %1, i8 %2, @@ -1255,7 +1255,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv16i8.i8( +declare @llvm.riscv.vaaddu.nxv16i8.i8( , , i8, @@ -1269,7 +1269,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv16i8.i8( + %a = call @llvm.riscv.vaaddu.nxv16i8.i8( undef, %0, i8 %1, @@ -1278,7 +1278,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv16i8.i8( +declare @llvm.riscv.vaaddu.mask.nxv16i8.i8( , , i8, @@ -1293,7 +1293,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v10, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv16i8.i8( + %a = call @llvm.riscv.vaaddu.mask.nxv16i8.i8( %0, %1, i8 %2, @@ -1303,7 +1303,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv32i8.i8( +declare @llvm.riscv.vaaddu.nxv32i8.i8( , , i8, @@ -1317,7 +1317,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv32i8.i8( + %a = call @llvm.riscv.vaaddu.nxv32i8.i8( undef, %0, i8 %1, @@ -1326,7 +1326,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv32i8.i8( +declare @llvm.riscv.vaaddu.mask.nxv32i8.i8( , , i8, @@ -1341,7 +1341,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v12, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv32i8.i8( + %a = call @llvm.riscv.vaaddu.mask.nxv32i8.i8( %0, %1, i8 %2, @@ -1351,7 +1351,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv64i8.i8( +declare @llvm.riscv.vaaddu.nxv64i8.i8( , , i8, @@ -1365,7 +1365,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv64i8.i8( + %a = call @llvm.riscv.vaaddu.nxv64i8.i8( undef, %0, i8 %1, @@ -1374,7 +1374,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv64i8.i8( +declare @llvm.riscv.vaaddu.mask.nxv64i8.i8( , , i8, @@ -1389,7 +1389,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v16, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv64i8.i8( + %a = call 
@llvm.riscv.vaaddu.mask.nxv64i8.i8( %0, %1, i8 %2, @@ -1399,7 +1399,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv1i16.i16( +declare @llvm.riscv.vaaddu.nxv1i16.i16( , , i16, @@ -1413,7 +1413,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv1i16.i16( + %a = call @llvm.riscv.vaaddu.nxv1i16.i16( undef, %0, i16 %1, @@ -1422,7 +1422,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv1i16.i16( +declare @llvm.riscv.vaaddu.mask.nxv1i16.i16( , , i16, @@ -1437,7 +1437,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv1i16.i16( + %a = call @llvm.riscv.vaaddu.mask.nxv1i16.i16( %0, %1, i16 %2, @@ -1447,7 +1447,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv2i16.i16( +declare @llvm.riscv.vaaddu.nxv2i16.i16( , , i16, @@ -1461,7 +1461,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv2i16.i16( + %a = call @llvm.riscv.vaaddu.nxv2i16.i16( undef, %0, i16 %1, @@ -1470,7 +1470,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv2i16.i16( +declare @llvm.riscv.vaaddu.mask.nxv2i16.i16( , , i16, @@ -1485,7 +1485,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv2i16.i16( + %a = call @llvm.riscv.vaaddu.mask.nxv2i16.i16( %0, %1, i16 %2, @@ -1495,7 +1495,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv4i16.i16( +declare @llvm.riscv.vaaddu.nxv4i16.i16( , , i16, @@ -1509,7 +1509,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv4i16.i16( + %a = call @llvm.riscv.vaaddu.nxv4i16.i16( undef, %0, i16 %1, @@ -1518,7 +1518,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv4i16.i16( +declare @llvm.riscv.vaaddu.mask.nxv4i16.i16( , , i16, @@ -1533,7 +1533,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv4i16.i16( + %a = call @llvm.riscv.vaaddu.mask.nxv4i16.i16( %0, %1, i16 %2, @@ -1543,7 +1543,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv8i16.i16( +declare @llvm.riscv.vaaddu.nxv8i16.i16( , , i16, @@ -1557,7 +1557,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv8i16.i16( + %a = call @llvm.riscv.vaaddu.nxv8i16.i16( undef, %0, i16 %1, @@ -1566,7 +1566,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv8i16.i16( +declare @llvm.riscv.vaaddu.mask.nxv8i16.i16( , , i16, @@ -1581,7 +1581,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v10, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv8i16.i16( + %a = call @llvm.riscv.vaaddu.mask.nxv8i16.i16( %0, %1, i16 %2, @@ -1591,7 +1591,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv16i16.i16( +declare @llvm.riscv.vaaddu.nxv16i16.i16( , , i16, @@ -1605,7 +1605,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv16i16.i16( + %a = call @llvm.riscv.vaaddu.nxv16i16.i16( undef, %0, i16 %1, @@ -1614,7 +1614,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv16i16.i16( +declare @llvm.riscv.vaaddu.mask.nxv16i16.i16( , , i16, @@ -1629,7 +1629,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v12, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv16i16.i16( + %a = call @llvm.riscv.vaaddu.mask.nxv16i16.i16( %0, %1, i16 %2, @@ -1639,7 +1639,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv32i16.i16( +declare @llvm.riscv.vaaddu.nxv32i16.i16( , , i16, @@ -1653,7 +1653,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: 
- %a = call @llvm.riscv.vaaddu.rm.nxv32i16.i16( + %a = call @llvm.riscv.vaaddu.nxv32i16.i16( undef, %0, i16 %1, @@ -1662,7 +1662,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv32i16.i16( +declare @llvm.riscv.vaaddu.mask.nxv32i16.i16( , , i16, @@ -1677,7 +1677,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v16, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv32i16.i16( + %a = call @llvm.riscv.vaaddu.mask.nxv32i16.i16( %0, %1, i16 %2, @@ -1687,7 +1687,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv1i32.i32( +declare @llvm.riscv.vaaddu.nxv1i32.i32( , , i32, @@ -1701,7 +1701,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv1i32.i32( + %a = call @llvm.riscv.vaaddu.nxv1i32.i32( undef, %0, i32 %1, @@ -1710,7 +1710,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv1i32.i32( +declare @llvm.riscv.vaaddu.mask.nxv1i32.i32( , , i32, @@ -1725,7 +1725,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv1i32.i32( + %a = call @llvm.riscv.vaaddu.mask.nxv1i32.i32( %0, %1, i32 %2, @@ -1735,7 +1735,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv2i32.i32( +declare @llvm.riscv.vaaddu.nxv2i32.i32( , , i32, @@ -1749,7 +1749,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv2i32.i32( + %a = call @llvm.riscv.vaaddu.nxv2i32.i32( undef, %0, i32 %1, @@ -1758,7 +1758,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv2i32.i32( +declare @llvm.riscv.vaaddu.mask.nxv2i32.i32( , , i32, @@ -1773,7 +1773,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv2i32.i32( + %a = call @llvm.riscv.vaaddu.mask.nxv2i32.i32( %0, %1, i32 %2, @@ -1783,7 +1783,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv4i32.i32( +declare @llvm.riscv.vaaddu.nxv4i32.i32( , , i32, @@ -1797,7 +1797,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv4i32.i32( + %a = call @llvm.riscv.vaaddu.nxv4i32.i32( undef, %0, i32 %1, @@ -1806,7 +1806,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv4i32.i32( +declare @llvm.riscv.vaaddu.mask.nxv4i32.i32( , , i32, @@ -1821,7 +1821,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v10, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv4i32.i32( + %a = call @llvm.riscv.vaaddu.mask.nxv4i32.i32( %0, %1, i32 %2, @@ -1831,7 +1831,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv8i32.i32( +declare @llvm.riscv.vaaddu.nxv8i32.i32( , , i32, @@ -1845,7 +1845,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv8i32.i32( + %a = call @llvm.riscv.vaaddu.nxv8i32.i32( undef, %0, i32 %1, @@ -1854,7 +1854,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv8i32.i32( +declare @llvm.riscv.vaaddu.mask.nxv8i32.i32( , , i32, @@ -1869,7 +1869,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v12, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv8i32.i32( + %a = call @llvm.riscv.vaaddu.mask.nxv8i32.i32( %0, %1, i32 %2, @@ -1879,7 +1879,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv16i32.i32( +declare @llvm.riscv.vaaddu.nxv16i32.i32( , , i32, @@ -1893,7 +1893,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv16i32.i32( + %a = call @llvm.riscv.vaaddu.nxv16i32.i32( undef, %0, i32 %1, @@ -1902,7 +1902,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv16i32.i32( +declare @llvm.riscv.vaaddu.mask.nxv16i32.i32( , , i32, @@ 
-1917,7 +1917,7 @@ ; CHECK-NEXT: vaaddu.vx v8, v16, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv16i32.i32( + %a = call @llvm.riscv.vaaddu.mask.nxv16i32.i32( %0, %1, i32 %2, @@ -1927,7 +1927,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv1i64.i64( +declare @llvm.riscv.vaaddu.nxv1i64.i64( , , i64, @@ -1954,7 +1954,7 @@ ; RV64-NEXT: vaaddu.vx v8, v8, a0 ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv1i64.i64( + %a = call @llvm.riscv.vaaddu.nxv1i64.i64( undef, %0, i64 %1, @@ -1963,7 +1963,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv1i64.i64( +declare @llvm.riscv.vaaddu.mask.nxv1i64.i64( , , i64, @@ -1991,7 +1991,7 @@ ; RV64-NEXT: vaaddu.vx v8, v9, a0, v0.t ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv1i64.i64( + %a = call @llvm.riscv.vaaddu.mask.nxv1i64.i64( %0, %1, i64 %2, @@ -2001,7 +2001,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv2i64.i64( +declare @llvm.riscv.vaaddu.nxv2i64.i64( , , i64, @@ -2028,7 +2028,7 @@ ; RV64-NEXT: vaaddu.vx v8, v8, a0 ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv2i64.i64( + %a = call @llvm.riscv.vaaddu.nxv2i64.i64( undef, %0, i64 %1, @@ -2037,7 +2037,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv2i64.i64( +declare @llvm.riscv.vaaddu.mask.nxv2i64.i64( , , i64, @@ -2065,7 +2065,7 @@ ; RV64-NEXT: vaaddu.vx v8, v10, a0, v0.t ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv2i64.i64( + %a = call @llvm.riscv.vaaddu.mask.nxv2i64.i64( %0, %1, i64 %2, @@ -2075,7 +2075,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv4i64.i64( +declare @llvm.riscv.vaaddu.nxv4i64.i64( , , i64, @@ -2102,7 +2102,7 @@ ; RV64-NEXT: vaaddu.vx v8, v8, a0 ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv4i64.i64( + %a = call @llvm.riscv.vaaddu.nxv4i64.i64( undef, %0, i64 %1, @@ -2111,7 +2111,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv4i64.i64( +declare @llvm.riscv.vaaddu.mask.nxv4i64.i64( , , i64, @@ -2139,7 +2139,7 @@ ; RV64-NEXT: vaaddu.vx v8, v12, a0, v0.t ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv4i64.i64( + %a = call @llvm.riscv.vaaddu.mask.nxv4i64.i64( %0, %1, i64 %2, @@ -2149,7 +2149,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.nxv8i64.i64( +declare @llvm.riscv.vaaddu.nxv8i64.i64( , , i64, @@ -2176,7 +2176,7 @@ ; RV64-NEXT: vaaddu.vx v8, v8, a0 ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.nxv8i64.i64( + %a = call @llvm.riscv.vaaddu.nxv8i64.i64( undef, %0, i64 %1, @@ -2185,7 +2185,7 @@ ret %a } -declare @llvm.riscv.vaaddu.rm.mask.nxv8i64.i64( +declare @llvm.riscv.vaaddu.mask.nxv8i64.i64( , , i64, @@ -2213,7 +2213,7 @@ ; RV64-NEXT: vaaddu.vx v8, v16, a0, v0.t ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vaaddu.rm.mask.nxv8i64.i64( + %a = call @llvm.riscv.vaaddu.mask.nxv8i64.i64( %0, %1, i64 %2, diff --git a/llvm/test/CodeGen/RISCV/rvv/vasub.ll b/llvm/test/CodeGen/RISCV/rvv/vasub.ll --- a/llvm/test/CodeGen/RISCV/rvv/vasub.ll +++ b/llvm/test/CodeGen/RISCV/rvv/vasub.ll @@ -3,7 +3,7 @@ ; RUN: -verify-machineinstrs | FileCheck %s --check-prefixes=CHECK,RV32 ; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v \ ; RUN: -verify-machineinstrs | FileCheck %s --check-prefixes=CHECK,RV64 -declare @llvm.riscv.vasub.rm.nxv1i8.nxv1i8( +declare @llvm.riscv.vasub.nxv1i8.nxv1i8( , , , @@ -17,7 +17,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vasub.nxv1i8.nxv1i8( undef, %0, %1, @@ -26,7 +26,7 @@ ret %a } -declare 
@llvm.riscv.vasub.rm.mask.nxv1i8.nxv1i8( +declare @llvm.riscv.vasub.mask.nxv1i8.nxv1i8( , , , @@ -41,7 +41,7 @@ ; CHECK-NEXT: vasub.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vasub.mask.nxv1i8.nxv1i8( %0, %1, %2, @@ -51,7 +51,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv2i8.nxv2i8( +declare @llvm.riscv.vasub.nxv2i8.nxv2i8( , , , @@ -65,7 +65,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv2i8.nxv2i8( + %a = call @llvm.riscv.vasub.nxv2i8.nxv2i8( undef, %0, %1, @@ -74,7 +74,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv2i8.nxv2i8( +declare @llvm.riscv.vasub.mask.nxv2i8.nxv2i8( , , , @@ -89,7 +89,7 @@ ; CHECK-NEXT: vasub.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv2i8.nxv2i8( + %a = call @llvm.riscv.vasub.mask.nxv2i8.nxv2i8( %0, %1, %2, @@ -99,7 +99,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv4i8.nxv4i8( +declare @llvm.riscv.vasub.nxv4i8.nxv4i8( , , , @@ -113,7 +113,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv4i8.nxv4i8( + %a = call @llvm.riscv.vasub.nxv4i8.nxv4i8( undef, %0, %1, @@ -122,7 +122,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv4i8.nxv4i8( +declare @llvm.riscv.vasub.mask.nxv4i8.nxv4i8( , , , @@ -137,7 +137,7 @@ ; CHECK-NEXT: vasub.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv4i8.nxv4i8( + %a = call @llvm.riscv.vasub.mask.nxv4i8.nxv4i8( %0, %1, %2, @@ -147,7 +147,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv8i8.nxv8i8( +declare @llvm.riscv.vasub.nxv8i8.nxv8i8( , , , @@ -161,7 +161,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv8i8.nxv8i8( + %a = call @llvm.riscv.vasub.nxv8i8.nxv8i8( undef, %0, %1, @@ -170,7 +170,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv8i8.nxv8i8( +declare @llvm.riscv.vasub.mask.nxv8i8.nxv8i8( , , , @@ -185,7 +185,7 @@ ; CHECK-NEXT: vasub.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv8i8.nxv8i8( + %a = call @llvm.riscv.vasub.mask.nxv8i8.nxv8i8( %0, %1, %2, @@ -195,7 +195,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv16i8.nxv16i8( +declare @llvm.riscv.vasub.nxv16i8.nxv16i8( , , , @@ -209,7 +209,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv16i8.nxv16i8( + %a = call @llvm.riscv.vasub.nxv16i8.nxv16i8( undef, %0, %1, @@ -218,7 +218,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv16i8.nxv16i8( +declare @llvm.riscv.vasub.mask.nxv16i8.nxv16i8( , , , @@ -233,7 +233,7 @@ ; CHECK-NEXT: vasub.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv16i8.nxv16i8( + %a = call @llvm.riscv.vasub.mask.nxv16i8.nxv16i8( %0, %1, %2, @@ -243,7 +243,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv32i8.nxv32i8( +declare @llvm.riscv.vasub.nxv32i8.nxv32i8( , , , @@ -257,7 +257,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv32i8.nxv32i8( + %a = call @llvm.riscv.vasub.nxv32i8.nxv32i8( undef, %0, %1, @@ -266,7 +266,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv32i8.nxv32i8( +declare @llvm.riscv.vasub.mask.nxv32i8.nxv32i8( , , , @@ -281,7 +281,7 @@ ; CHECK-NEXT: vasub.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv32i8.nxv32i8( + %a = call @llvm.riscv.vasub.mask.nxv32i8.nxv32i8( %0, %1, %2, @@ -291,7 +291,7 @@ ret 
%a } -declare @llvm.riscv.vasub.rm.nxv64i8.nxv64i8( +declare @llvm.riscv.vasub.nxv64i8.nxv64i8( , , , @@ -305,7 +305,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv64i8.nxv64i8( + %a = call @llvm.riscv.vasub.nxv64i8.nxv64i8( undef, %0, %1, @@ -314,7 +314,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv64i8.nxv64i8( +declare @llvm.riscv.vasub.mask.nxv64i8.nxv64i8( , , , @@ -330,7 +330,7 @@ ; CHECK-NEXT: vasub.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv64i8.nxv64i8( + %a = call @llvm.riscv.vasub.mask.nxv64i8.nxv64i8( %0, %1, %2, @@ -340,7 +340,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv1i16.nxv1i16( +declare @llvm.riscv.vasub.nxv1i16.nxv1i16( , , , @@ -354,7 +354,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv1i16.nxv1i16( + %a = call @llvm.riscv.vasub.nxv1i16.nxv1i16( undef, %0, %1, @@ -363,7 +363,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv1i16.nxv1i16( +declare @llvm.riscv.vasub.mask.nxv1i16.nxv1i16( , , , @@ -378,7 +378,7 @@ ; CHECK-NEXT: vasub.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv1i16.nxv1i16( + %a = call @llvm.riscv.vasub.mask.nxv1i16.nxv1i16( %0, %1, %2, @@ -388,7 +388,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv2i16.nxv2i16( +declare @llvm.riscv.vasub.nxv2i16.nxv2i16( , , , @@ -402,7 +402,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv2i16.nxv2i16( + %a = call @llvm.riscv.vasub.nxv2i16.nxv2i16( undef, %0, %1, @@ -411,7 +411,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv2i16.nxv2i16( +declare @llvm.riscv.vasub.mask.nxv2i16.nxv2i16( , , , @@ -426,7 +426,7 @@ ; CHECK-NEXT: vasub.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv2i16.nxv2i16( + %a = call @llvm.riscv.vasub.mask.nxv2i16.nxv2i16( %0, %1, %2, @@ -436,7 +436,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv4i16.nxv4i16( +declare @llvm.riscv.vasub.nxv4i16.nxv4i16( , , , @@ -450,7 +450,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv4i16.nxv4i16( + %a = call @llvm.riscv.vasub.nxv4i16.nxv4i16( undef, %0, %1, @@ -459,7 +459,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv4i16.nxv4i16( +declare @llvm.riscv.vasub.mask.nxv4i16.nxv4i16( , , , @@ -474,7 +474,7 @@ ; CHECK-NEXT: vasub.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv4i16.nxv4i16( + %a = call @llvm.riscv.vasub.mask.nxv4i16.nxv4i16( %0, %1, %2, @@ -484,7 +484,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv8i16.nxv8i16( +declare @llvm.riscv.vasub.nxv8i16.nxv8i16( , , , @@ -498,7 +498,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv8i16.nxv8i16( + %a = call @llvm.riscv.vasub.nxv8i16.nxv8i16( undef, %0, %1, @@ -507,7 +507,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv8i16.nxv8i16( +declare @llvm.riscv.vasub.mask.nxv8i16.nxv8i16( , , , @@ -522,7 +522,7 @@ ; CHECK-NEXT: vasub.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv8i16.nxv8i16( + %a = call @llvm.riscv.vasub.mask.nxv8i16.nxv8i16( %0, %1, %2, @@ -532,7 +532,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv16i16.nxv16i16( +declare @llvm.riscv.vasub.nxv16i16.nxv16i16( , , , @@ -546,7 +546,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv16i16.nxv16i16( + %a = call 
@llvm.riscv.vasub.nxv16i16.nxv16i16( undef, %0, %1, @@ -555,7 +555,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv16i16.nxv16i16( +declare @llvm.riscv.vasub.mask.nxv16i16.nxv16i16( , , , @@ -570,7 +570,7 @@ ; CHECK-NEXT: vasub.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv16i16.nxv16i16( + %a = call @llvm.riscv.vasub.mask.nxv16i16.nxv16i16( %0, %1, %2, @@ -580,7 +580,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv32i16.nxv32i16( +declare @llvm.riscv.vasub.nxv32i16.nxv32i16( , , , @@ -594,7 +594,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv32i16.nxv32i16( + %a = call @llvm.riscv.vasub.nxv32i16.nxv32i16( undef, %0, %1, @@ -603,7 +603,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv32i16.nxv32i16( +declare @llvm.riscv.vasub.mask.nxv32i16.nxv32i16( , , , @@ -619,7 +619,7 @@ ; CHECK-NEXT: vasub.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv32i16.nxv32i16( + %a = call @llvm.riscv.vasub.mask.nxv32i16.nxv32i16( %0, %1, %2, @@ -629,7 +629,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv1i32.nxv1i32( +declare @llvm.riscv.vasub.nxv1i32.nxv1i32( , , , @@ -643,7 +643,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv1i32.nxv1i32( + %a = call @llvm.riscv.vasub.nxv1i32.nxv1i32( undef, %0, %1, @@ -652,7 +652,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv1i32.nxv1i32( +declare @llvm.riscv.vasub.mask.nxv1i32.nxv1i32( , , , @@ -667,7 +667,7 @@ ; CHECK-NEXT: vasub.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv1i32.nxv1i32( + %a = call @llvm.riscv.vasub.mask.nxv1i32.nxv1i32( %0, %1, %2, @@ -677,7 +677,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv2i32.nxv2i32( +declare @llvm.riscv.vasub.nxv2i32.nxv2i32( , , , @@ -691,7 +691,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv2i32.nxv2i32( + %a = call @llvm.riscv.vasub.nxv2i32.nxv2i32( undef, %0, %1, @@ -700,7 +700,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv2i32.nxv2i32( +declare @llvm.riscv.vasub.mask.nxv2i32.nxv2i32( , , , @@ -715,7 +715,7 @@ ; CHECK-NEXT: vasub.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv2i32.nxv2i32( + %a = call @llvm.riscv.vasub.mask.nxv2i32.nxv2i32( %0, %1, %2, @@ -725,7 +725,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv4i32.nxv4i32( +declare @llvm.riscv.vasub.nxv4i32.nxv4i32( , , , @@ -739,7 +739,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv4i32.nxv4i32( + %a = call @llvm.riscv.vasub.nxv4i32.nxv4i32( undef, %0, %1, @@ -748,7 +748,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv4i32.nxv4i32( +declare @llvm.riscv.vasub.mask.nxv4i32.nxv4i32( , , , @@ -763,7 +763,7 @@ ; CHECK-NEXT: vasub.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv4i32.nxv4i32( + %a = call @llvm.riscv.vasub.mask.nxv4i32.nxv4i32( %0, %1, %2, @@ -773,7 +773,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv8i32.nxv8i32( +declare @llvm.riscv.vasub.nxv8i32.nxv8i32( , , , @@ -787,7 +787,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv8i32.nxv8i32( + %a = call @llvm.riscv.vasub.nxv8i32.nxv8i32( undef, %0, %1, @@ -796,7 +796,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv8i32.nxv8i32( +declare @llvm.riscv.vasub.mask.nxv8i32.nxv8i32( , , , @@ -811,7 +811,7 @@ ; CHECK-NEXT: vasub.vv v8, 
v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv8i32.nxv8i32( + %a = call @llvm.riscv.vasub.mask.nxv8i32.nxv8i32( %0, %1, %2, @@ -821,7 +821,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv16i32.nxv16i32( +declare @llvm.riscv.vasub.nxv16i32.nxv16i32( , , , @@ -835,7 +835,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv16i32.nxv16i32( + %a = call @llvm.riscv.vasub.nxv16i32.nxv16i32( undef, %0, %1, @@ -844,7 +844,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv16i32.nxv16i32( +declare @llvm.riscv.vasub.mask.nxv16i32.nxv16i32( , , , @@ -860,7 +860,7 @@ ; CHECK-NEXT: vasub.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv16i32.nxv16i32( + %a = call @llvm.riscv.vasub.mask.nxv16i32.nxv16i32( %0, %1, %2, @@ -870,7 +870,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv1i64.nxv1i64( +declare @llvm.riscv.vasub.nxv1i64.nxv1i64( , , , @@ -884,7 +884,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv1i64.nxv1i64( + %a = call @llvm.riscv.vasub.nxv1i64.nxv1i64( undef, %0, %1, @@ -893,7 +893,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv1i64.nxv1i64( +declare @llvm.riscv.vasub.mask.nxv1i64.nxv1i64( , , , @@ -908,7 +908,7 @@ ; CHECK-NEXT: vasub.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv1i64.nxv1i64( + %a = call @llvm.riscv.vasub.mask.nxv1i64.nxv1i64( %0, %1, %2, @@ -918,7 +918,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv2i64.nxv2i64( +declare @llvm.riscv.vasub.nxv2i64.nxv2i64( , , , @@ -932,7 +932,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv2i64.nxv2i64( + %a = call @llvm.riscv.vasub.nxv2i64.nxv2i64( undef, %0, %1, @@ -941,7 +941,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv2i64.nxv2i64( +declare @llvm.riscv.vasub.mask.nxv2i64.nxv2i64( , , , @@ -956,7 +956,7 @@ ; CHECK-NEXT: vasub.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv2i64.nxv2i64( + %a = call @llvm.riscv.vasub.mask.nxv2i64.nxv2i64( %0, %1, %2, @@ -966,7 +966,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv4i64.nxv4i64( +declare @llvm.riscv.vasub.nxv4i64.nxv4i64( , , , @@ -980,7 +980,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv4i64.nxv4i64( + %a = call @llvm.riscv.vasub.nxv4i64.nxv4i64( undef, %0, %1, @@ -989,7 +989,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv4i64.nxv4i64( +declare @llvm.riscv.vasub.mask.nxv4i64.nxv4i64( , , , @@ -1004,7 +1004,7 @@ ; CHECK-NEXT: vasub.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv4i64.nxv4i64( + %a = call @llvm.riscv.vasub.mask.nxv4i64.nxv4i64( %0, %1, %2, @@ -1014,7 +1014,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv8i64.nxv8i64( +declare @llvm.riscv.vasub.nxv8i64.nxv8i64( , , , @@ -1028,7 +1028,7 @@ ; CHECK-NEXT: vasub.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv8i64.nxv8i64( + %a = call @llvm.riscv.vasub.nxv8i64.nxv8i64( undef, %0, %1, @@ -1037,7 +1037,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv8i64.nxv8i64( +declare @llvm.riscv.vasub.mask.nxv8i64.nxv8i64( , , , @@ -1053,7 +1053,7 @@ ; CHECK-NEXT: vasub.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv8i64.nxv8i64( + %a = call @llvm.riscv.vasub.mask.nxv8i64.nxv8i64( %0, %1, %2, @@ -1063,7 +1063,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv1i8.i8( 
+declare @llvm.riscv.vasub.nxv1i8.i8( , , i8, @@ -1077,7 +1077,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv1i8.i8( + %a = call @llvm.riscv.vasub.nxv1i8.i8( undef, %0, i8 %1, @@ -1086,7 +1086,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv1i8.i8( +declare @llvm.riscv.vasub.mask.nxv1i8.i8( , , i8, @@ -1101,7 +1101,7 @@ ; CHECK-NEXT: vasub.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv1i8.i8( + %a = call @llvm.riscv.vasub.mask.nxv1i8.i8( %0, %1, i8 %2, @@ -1111,7 +1111,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv2i8.i8( +declare @llvm.riscv.vasub.nxv2i8.i8( , , i8, @@ -1125,7 +1125,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv2i8.i8( + %a = call @llvm.riscv.vasub.nxv2i8.i8( undef, %0, i8 %1, @@ -1134,7 +1134,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv2i8.i8( +declare @llvm.riscv.vasub.mask.nxv2i8.i8( , , i8, @@ -1149,7 +1149,7 @@ ; CHECK-NEXT: vasub.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv2i8.i8( + %a = call @llvm.riscv.vasub.mask.nxv2i8.i8( %0, %1, i8 %2, @@ -1159,7 +1159,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv4i8.i8( +declare @llvm.riscv.vasub.nxv4i8.i8( , , i8, @@ -1173,7 +1173,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv4i8.i8( + %a = call @llvm.riscv.vasub.nxv4i8.i8( undef, %0, i8 %1, @@ -1182,7 +1182,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv4i8.i8( +declare @llvm.riscv.vasub.mask.nxv4i8.i8( , , i8, @@ -1197,7 +1197,7 @@ ; CHECK-NEXT: vasub.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv4i8.i8( + %a = call @llvm.riscv.vasub.mask.nxv4i8.i8( %0, %1, i8 %2, @@ -1207,7 +1207,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv8i8.i8( +declare @llvm.riscv.vasub.nxv8i8.i8( , , i8, @@ -1221,7 +1221,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv8i8.i8( + %a = call @llvm.riscv.vasub.nxv8i8.i8( undef, %0, i8 %1, @@ -1230,7 +1230,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv8i8.i8( +declare @llvm.riscv.vasub.mask.nxv8i8.i8( , , i8, @@ -1245,7 +1245,7 @@ ; CHECK-NEXT: vasub.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv8i8.i8( + %a = call @llvm.riscv.vasub.mask.nxv8i8.i8( %0, %1, i8 %2, @@ -1255,7 +1255,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv16i8.i8( +declare @llvm.riscv.vasub.nxv16i8.i8( , , i8, @@ -1269,7 +1269,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv16i8.i8( + %a = call @llvm.riscv.vasub.nxv16i8.i8( undef, %0, i8 %1, @@ -1278,7 +1278,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv16i8.i8( +declare @llvm.riscv.vasub.mask.nxv16i8.i8( , , i8, @@ -1293,7 +1293,7 @@ ; CHECK-NEXT: vasub.vx v8, v10, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv16i8.i8( + %a = call @llvm.riscv.vasub.mask.nxv16i8.i8( %0, %1, i8 %2, @@ -1303,7 +1303,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv32i8.i8( +declare @llvm.riscv.vasub.nxv32i8.i8( , , i8, @@ -1317,7 +1317,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv32i8.i8( + %a = call @llvm.riscv.vasub.nxv32i8.i8( undef, %0, i8 %1, @@ -1326,7 +1326,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv32i8.i8( +declare @llvm.riscv.vasub.mask.nxv32i8.i8( , , i8, @@ -1341,7 +1341,7 @@ ; CHECK-NEXT: vasub.vx 
v8, v12, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv32i8.i8( + %a = call @llvm.riscv.vasub.mask.nxv32i8.i8( %0, %1, i8 %2, @@ -1351,7 +1351,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv64i8.i8( +declare @llvm.riscv.vasub.nxv64i8.i8( , , i8, @@ -1365,7 +1365,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv64i8.i8( + %a = call @llvm.riscv.vasub.nxv64i8.i8( undef, %0, i8 %1, @@ -1374,7 +1374,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv64i8.i8( +declare @llvm.riscv.vasub.mask.nxv64i8.i8( , , i8, @@ -1389,7 +1389,7 @@ ; CHECK-NEXT: vasub.vx v8, v16, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv64i8.i8( + %a = call @llvm.riscv.vasub.mask.nxv64i8.i8( %0, %1, i8 %2, @@ -1399,7 +1399,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv1i16.i16( +declare @llvm.riscv.vasub.nxv1i16.i16( , , i16, @@ -1413,7 +1413,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv1i16.i16( + %a = call @llvm.riscv.vasub.nxv1i16.i16( undef, %0, i16 %1, @@ -1422,7 +1422,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv1i16.i16( +declare @llvm.riscv.vasub.mask.nxv1i16.i16( , , i16, @@ -1437,7 +1437,7 @@ ; CHECK-NEXT: vasub.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv1i16.i16( + %a = call @llvm.riscv.vasub.mask.nxv1i16.i16( %0, %1, i16 %2, @@ -1447,7 +1447,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv2i16.i16( +declare @llvm.riscv.vasub.nxv2i16.i16( , , i16, @@ -1461,7 +1461,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv2i16.i16( + %a = call @llvm.riscv.vasub.nxv2i16.i16( undef, %0, i16 %1, @@ -1470,7 +1470,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv2i16.i16( +declare @llvm.riscv.vasub.mask.nxv2i16.i16( , , i16, @@ -1485,7 +1485,7 @@ ; CHECK-NEXT: vasub.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv2i16.i16( + %a = call @llvm.riscv.vasub.mask.nxv2i16.i16( %0, %1, i16 %2, @@ -1495,7 +1495,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv4i16.i16( +declare @llvm.riscv.vasub.nxv4i16.i16( , , i16, @@ -1509,7 +1509,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv4i16.i16( + %a = call @llvm.riscv.vasub.nxv4i16.i16( undef, %0, i16 %1, @@ -1518,7 +1518,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv4i16.i16( +declare @llvm.riscv.vasub.mask.nxv4i16.i16( , , i16, @@ -1533,7 +1533,7 @@ ; CHECK-NEXT: vasub.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv4i16.i16( + %a = call @llvm.riscv.vasub.mask.nxv4i16.i16( %0, %1, i16 %2, @@ -1543,7 +1543,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv8i16.i16( +declare @llvm.riscv.vasub.nxv8i16.i16( , , i16, @@ -1557,7 +1557,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv8i16.i16( + %a = call @llvm.riscv.vasub.nxv8i16.i16( undef, %0, i16 %1, @@ -1566,7 +1566,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv8i16.i16( +declare @llvm.riscv.vasub.mask.nxv8i16.i16( , , i16, @@ -1581,7 +1581,7 @@ ; CHECK-NEXT: vasub.vx v8, v10, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv8i16.i16( + %a = call @llvm.riscv.vasub.mask.nxv8i16.i16( %0, %1, i16 %2, @@ -1591,7 +1591,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv16i16.i16( +declare @llvm.riscv.vasub.nxv16i16.i16( , , i16, @@ -1605,7 +1605,7 @@ ; CHECK-NEXT: vasub.vx v8, 
v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv16i16.i16( + %a = call @llvm.riscv.vasub.nxv16i16.i16( undef, %0, i16 %1, @@ -1614,7 +1614,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv16i16.i16( +declare @llvm.riscv.vasub.mask.nxv16i16.i16( , , i16, @@ -1629,7 +1629,7 @@ ; CHECK-NEXT: vasub.vx v8, v12, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv16i16.i16( + %a = call @llvm.riscv.vasub.mask.nxv16i16.i16( %0, %1, i16 %2, @@ -1639,7 +1639,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv32i16.i16( +declare @llvm.riscv.vasub.nxv32i16.i16( , , i16, @@ -1653,7 +1653,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv32i16.i16( + %a = call @llvm.riscv.vasub.nxv32i16.i16( undef, %0, i16 %1, @@ -1662,7 +1662,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv32i16.i16( +declare @llvm.riscv.vasub.mask.nxv32i16.i16( , , i16, @@ -1677,7 +1677,7 @@ ; CHECK-NEXT: vasub.vx v8, v16, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv32i16.i16( + %a = call @llvm.riscv.vasub.mask.nxv32i16.i16( %0, %1, i16 %2, @@ -1687,7 +1687,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv1i32.i32( +declare @llvm.riscv.vasub.nxv1i32.i32( , , i32, @@ -1701,7 +1701,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv1i32.i32( + %a = call @llvm.riscv.vasub.nxv1i32.i32( undef, %0, i32 %1, @@ -1710,7 +1710,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv1i32.i32( +declare @llvm.riscv.vasub.mask.nxv1i32.i32( , , i32, @@ -1725,7 +1725,7 @@ ; CHECK-NEXT: vasub.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv1i32.i32( + %a = call @llvm.riscv.vasub.mask.nxv1i32.i32( %0, %1, i32 %2, @@ -1735,7 +1735,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv2i32.i32( +declare @llvm.riscv.vasub.nxv2i32.i32( , , i32, @@ -1749,7 +1749,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv2i32.i32( + %a = call @llvm.riscv.vasub.nxv2i32.i32( undef, %0, i32 %1, @@ -1758,7 +1758,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv2i32.i32( +declare @llvm.riscv.vasub.mask.nxv2i32.i32( , , i32, @@ -1773,7 +1773,7 @@ ; CHECK-NEXT: vasub.vx v8, v9, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv2i32.i32( + %a = call @llvm.riscv.vasub.mask.nxv2i32.i32( %0, %1, i32 %2, @@ -1783,7 +1783,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv4i32.i32( +declare @llvm.riscv.vasub.nxv4i32.i32( , , i32, @@ -1797,7 +1797,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv4i32.i32( + %a = call @llvm.riscv.vasub.nxv4i32.i32( undef, %0, i32 %1, @@ -1806,7 +1806,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv4i32.i32( +declare @llvm.riscv.vasub.mask.nxv4i32.i32( , , i32, @@ -1821,7 +1821,7 @@ ; CHECK-NEXT: vasub.vx v8, v10, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv4i32.i32( + %a = call @llvm.riscv.vasub.mask.nxv4i32.i32( %0, %1, i32 %2, @@ -1831,7 +1831,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv8i32.i32( +declare @llvm.riscv.vasub.nxv8i32.i32( , , i32, @@ -1845,7 +1845,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv8i32.i32( + %a = call @llvm.riscv.vasub.nxv8i32.i32( undef, %0, i32 %1, @@ -1854,7 +1854,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv8i32.i32( +declare @llvm.riscv.vasub.mask.nxv8i32.i32( , , i32, @@ -1869,7 +1869,7 @@ ; 
CHECK-NEXT: vasub.vx v8, v12, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv8i32.i32( + %a = call @llvm.riscv.vasub.mask.nxv8i32.i32( %0, %1, i32 %2, @@ -1879,7 +1879,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv16i32.i32( +declare @llvm.riscv.vasub.nxv16i32.i32( , , i32, @@ -1893,7 +1893,7 @@ ; CHECK-NEXT: vasub.vx v8, v8, a0 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv16i32.i32( + %a = call @llvm.riscv.vasub.nxv16i32.i32( undef, %0, i32 %1, @@ -1902,7 +1902,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv16i32.i32( +declare @llvm.riscv.vasub.mask.nxv16i32.i32( , , i32, @@ -1917,7 +1917,7 @@ ; CHECK-NEXT: vasub.vx v8, v16, a0, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv16i32.i32( + %a = call @llvm.riscv.vasub.mask.nxv16i32.i32( %0, %1, i32 %2, @@ -1927,7 +1927,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv1i64.i64( +declare @llvm.riscv.vasub.nxv1i64.i64( , , i64, @@ -1954,7 +1954,7 @@ ; RV64-NEXT: vasub.vx v8, v8, a0 ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv1i64.i64( + %a = call @llvm.riscv.vasub.nxv1i64.i64( undef, %0, i64 %1, @@ -1963,7 +1963,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv1i64.i64( +declare @llvm.riscv.vasub.mask.nxv1i64.i64( , , i64, @@ -1991,7 +1991,7 @@ ; RV64-NEXT: vasub.vx v8, v9, a0, v0.t ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv1i64.i64( + %a = call @llvm.riscv.vasub.mask.nxv1i64.i64( %0, %1, i64 %2, @@ -2001,7 +2001,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv2i64.i64( +declare @llvm.riscv.vasub.nxv2i64.i64( , , i64, @@ -2028,7 +2028,7 @@ ; RV64-NEXT: vasub.vx v8, v8, a0 ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv2i64.i64( + %a = call @llvm.riscv.vasub.nxv2i64.i64( undef, %0, i64 %1, @@ -2037,7 +2037,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv2i64.i64( +declare @llvm.riscv.vasub.mask.nxv2i64.i64( , , i64, @@ -2065,7 +2065,7 @@ ; RV64-NEXT: vasub.vx v8, v10, a0, v0.t ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv2i64.i64( + %a = call @llvm.riscv.vasub.mask.nxv2i64.i64( %0, %1, i64 %2, @@ -2075,7 +2075,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv4i64.i64( +declare @llvm.riscv.vasub.nxv4i64.i64( , , i64, @@ -2102,7 +2102,7 @@ ; RV64-NEXT: vasub.vx v8, v8, a0 ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv4i64.i64( + %a = call @llvm.riscv.vasub.nxv4i64.i64( undef, %0, i64 %1, @@ -2111,7 +2111,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv4i64.i64( +declare @llvm.riscv.vasub.mask.nxv4i64.i64( , , i64, @@ -2139,7 +2139,7 @@ ; RV64-NEXT: vasub.vx v8, v12, a0, v0.t ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv4i64.i64( + %a = call @llvm.riscv.vasub.mask.nxv4i64.i64( %0, %1, i64 %2, @@ -2149,7 +2149,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.nxv8i64.i64( +declare @llvm.riscv.vasub.nxv8i64.i64( , , i64, @@ -2176,7 +2176,7 @@ ; RV64-NEXT: vasub.vx v8, v8, a0 ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.nxv8i64.i64( + %a = call @llvm.riscv.vasub.nxv8i64.i64( undef, %0, i64 %1, @@ -2185,7 +2185,7 @@ ret %a } -declare @llvm.riscv.vasub.rm.mask.nxv8i64.i64( +declare @llvm.riscv.vasub.mask.nxv8i64.i64( , , i64, @@ -2213,7 +2213,7 @@ ; RV64-NEXT: vasub.vx v8, v16, a0, v0.t ; RV64-NEXT: ret entry: - %a = call @llvm.riscv.vasub.rm.mask.nxv8i64.i64( + %a = call @llvm.riscv.vasub.mask.nxv8i64.i64( %0, %1, i64 %2, diff --git a/llvm/test/CodeGen/RISCV/rvv/vasubu.ll b/llvm/test/CodeGen/RISCV/rvv/vasubu.ll --- a/llvm/test/CodeGen/RISCV/rvv/vasubu.ll +++ 
b/llvm/test/CodeGen/RISCV/rvv/vasubu.ll @@ -3,7 +3,7 @@ ; RUN: -verify-machineinstrs | FileCheck %s --check-prefixes=CHECK,RV32 ; RUN: sed 's/iXLen/i64/g' %s | llc -mtriple=riscv64 -mattr=+v \ ; RUN: -verify-machineinstrs | FileCheck %s --check-prefixes=CHECK,RV64 -declare @llvm.riscv.vasubu.rm.nxv1i8.nxv1i8( +declare @llvm.riscv.vasubu.nxv1i8.nxv1i8( , , , @@ -17,7 +17,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vasubu.nxv1i8.nxv1i8( undef, %0, %1, @@ -26,7 +26,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv1i8.nxv1i8( +declare @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8( , , , @@ -41,7 +41,7 @@ ; CHECK-NEXT: vasubu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv1i8.nxv1i8( + %a = call @llvm.riscv.vasubu.mask.nxv1i8.nxv1i8( %0, %1, %2, @@ -51,7 +51,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv2i8.nxv2i8( +declare @llvm.riscv.vasubu.nxv2i8.nxv2i8( , , , @@ -65,7 +65,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv2i8.nxv2i8( + %a = call @llvm.riscv.vasubu.nxv2i8.nxv2i8( undef, %0, %1, @@ -74,7 +74,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv2i8.nxv2i8( +declare @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8( , , , @@ -89,7 +89,7 @@ ; CHECK-NEXT: vasubu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv2i8.nxv2i8( + %a = call @llvm.riscv.vasubu.mask.nxv2i8.nxv2i8( %0, %1, %2, @@ -99,7 +99,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv4i8.nxv4i8( +declare @llvm.riscv.vasubu.nxv4i8.nxv4i8( , , , @@ -113,7 +113,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv4i8.nxv4i8( + %a = call @llvm.riscv.vasubu.nxv4i8.nxv4i8( undef, %0, %1, @@ -122,7 +122,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv4i8.nxv4i8( +declare @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8( , , , @@ -137,7 +137,7 @@ ; CHECK-NEXT: vasubu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv4i8.nxv4i8( + %a = call @llvm.riscv.vasubu.mask.nxv4i8.nxv4i8( %0, %1, %2, @@ -147,7 +147,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv8i8.nxv8i8( +declare @llvm.riscv.vasubu.nxv8i8.nxv8i8( , , , @@ -161,7 +161,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv8i8.nxv8i8( + %a = call @llvm.riscv.vasubu.nxv8i8.nxv8i8( undef, %0, %1, @@ -170,7 +170,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv8i8.nxv8i8( +declare @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8( , , , @@ -185,7 +185,7 @@ ; CHECK-NEXT: vasubu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv8i8.nxv8i8( + %a = call @llvm.riscv.vasubu.mask.nxv8i8.nxv8i8( %0, %1, %2, @@ -195,7 +195,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv16i8.nxv16i8( +declare @llvm.riscv.vasubu.nxv16i8.nxv16i8( , , , @@ -209,7 +209,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv16i8.nxv16i8( + %a = call @llvm.riscv.vasubu.nxv16i8.nxv16i8( undef, %0, %1, @@ -218,7 +218,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv16i8.nxv16i8( +declare @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8( , , , @@ -233,7 +233,7 @@ ; CHECK-NEXT: vasubu.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv16i8.nxv16i8( + %a = call @llvm.riscv.vasubu.mask.nxv16i8.nxv16i8( %0, %1, %2, @@ -243,7 +243,7 @@ ret %a } -declare 
@llvm.riscv.vasubu.rm.nxv32i8.nxv32i8( +declare @llvm.riscv.vasubu.nxv32i8.nxv32i8( , , , @@ -257,7 +257,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv32i8.nxv32i8( + %a = call @llvm.riscv.vasubu.nxv32i8.nxv32i8( undef, %0, %1, @@ -266,7 +266,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv32i8.nxv32i8( +declare @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8( , , , @@ -281,7 +281,7 @@ ; CHECK-NEXT: vasubu.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv32i8.nxv32i8( + %a = call @llvm.riscv.vasubu.mask.nxv32i8.nxv32i8( %0, %1, %2, @@ -291,7 +291,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv64i8.nxv64i8( +declare @llvm.riscv.vasubu.nxv64i8.nxv64i8( , , , @@ -305,7 +305,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv64i8.nxv64i8( + %a = call @llvm.riscv.vasubu.nxv64i8.nxv64i8( undef, %0, %1, @@ -314,7 +314,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv64i8.nxv64i8( +declare @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8( , , , @@ -330,7 +330,7 @@ ; CHECK-NEXT: vasubu.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv64i8.nxv64i8( + %a = call @llvm.riscv.vasubu.mask.nxv64i8.nxv64i8( %0, %1, %2, @@ -340,7 +340,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv1i16.nxv1i16( +declare @llvm.riscv.vasubu.nxv1i16.nxv1i16( , , , @@ -354,7 +354,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv1i16.nxv1i16( + %a = call @llvm.riscv.vasubu.nxv1i16.nxv1i16( undef, %0, %1, @@ -363,7 +363,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv1i16.nxv1i16( +declare @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16( , , , @@ -378,7 +378,7 @@ ; CHECK-NEXT: vasubu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv1i16.nxv1i16( + %a = call @llvm.riscv.vasubu.mask.nxv1i16.nxv1i16( %0, %1, %2, @@ -388,7 +388,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv2i16.nxv2i16( +declare @llvm.riscv.vasubu.nxv2i16.nxv2i16( , , , @@ -402,7 +402,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv2i16.nxv2i16( + %a = call @llvm.riscv.vasubu.nxv2i16.nxv2i16( undef, %0, %1, @@ -411,7 +411,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv2i16.nxv2i16( +declare @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16( , , , @@ -426,7 +426,7 @@ ; CHECK-NEXT: vasubu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv2i16.nxv2i16( + %a = call @llvm.riscv.vasubu.mask.nxv2i16.nxv2i16( %0, %1, %2, @@ -436,7 +436,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv4i16.nxv4i16( +declare @llvm.riscv.vasubu.nxv4i16.nxv4i16( , , , @@ -450,7 +450,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv4i16.nxv4i16( + %a = call @llvm.riscv.vasubu.nxv4i16.nxv4i16( undef, %0, %1, @@ -459,7 +459,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv4i16.nxv4i16( +declare @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16( , , , @@ -474,7 +474,7 @@ ; CHECK-NEXT: vasubu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv4i16.nxv4i16( + %a = call @llvm.riscv.vasubu.mask.nxv4i16.nxv4i16( %0, %1, %2, @@ -484,7 +484,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv8i16.nxv8i16( +declare @llvm.riscv.vasubu.nxv8i16.nxv8i16( , , , @@ -498,7 +498,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call 
@llvm.riscv.vasubu.rm.nxv8i16.nxv8i16( + %a = call @llvm.riscv.vasubu.nxv8i16.nxv8i16( undef, %0, %1, @@ -507,7 +507,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv8i16.nxv8i16( +declare @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16( , , , @@ -522,7 +522,7 @@ ; CHECK-NEXT: vasubu.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv8i16.nxv8i16( + %a = call @llvm.riscv.vasubu.mask.nxv8i16.nxv8i16( %0, %1, %2, @@ -532,7 +532,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv16i16.nxv16i16( +declare @llvm.riscv.vasubu.nxv16i16.nxv16i16( , , , @@ -546,7 +546,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv16i16.nxv16i16( + %a = call @llvm.riscv.vasubu.nxv16i16.nxv16i16( undef, %0, %1, @@ -555,7 +555,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv16i16.nxv16i16( +declare @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16( , , , @@ -570,7 +570,7 @@ ; CHECK-NEXT: vasubu.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv16i16.nxv16i16( + %a = call @llvm.riscv.vasubu.mask.nxv16i16.nxv16i16( %0, %1, %2, @@ -580,7 +580,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv32i16.nxv32i16( +declare @llvm.riscv.vasubu.nxv32i16.nxv32i16( , , , @@ -594,7 +594,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv32i16.nxv32i16( + %a = call @llvm.riscv.vasubu.nxv32i16.nxv32i16( undef, %0, %1, @@ -603,7 +603,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv32i16.nxv32i16( +declare @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16( , , , @@ -619,7 +619,7 @@ ; CHECK-NEXT: vasubu.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv32i16.nxv32i16( + %a = call @llvm.riscv.vasubu.mask.nxv32i16.nxv32i16( %0, %1, %2, @@ -629,7 +629,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv1i32.nxv1i32( +declare @llvm.riscv.vasubu.nxv1i32.nxv1i32( , , , @@ -643,7 +643,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv1i32.nxv1i32( + %a = call @llvm.riscv.vasubu.nxv1i32.nxv1i32( undef, %0, %1, @@ -652,7 +652,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv1i32.nxv1i32( +declare @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32( , , , @@ -667,7 +667,7 @@ ; CHECK-NEXT: vasubu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv1i32.nxv1i32( + %a = call @llvm.riscv.vasubu.mask.nxv1i32.nxv1i32( %0, %1, %2, @@ -677,7 +677,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv2i32.nxv2i32( +declare @llvm.riscv.vasubu.nxv2i32.nxv2i32( , , , @@ -691,7 +691,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv2i32.nxv2i32( + %a = call @llvm.riscv.vasubu.nxv2i32.nxv2i32( undef, %0, %1, @@ -700,7 +700,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv2i32.nxv2i32( +declare @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32( , , , @@ -715,7 +715,7 @@ ; CHECK-NEXT: vasubu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv2i32.nxv2i32( + %a = call @llvm.riscv.vasubu.mask.nxv2i32.nxv2i32( %0, %1, %2, @@ -725,7 +725,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv4i32.nxv4i32( +declare @llvm.riscv.vasubu.nxv4i32.nxv4i32( , , , @@ -739,7 +739,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv4i32.nxv4i32( + %a = call @llvm.riscv.vasubu.nxv4i32.nxv4i32( undef, %0, %1, @@ -748,7 +748,7 @@ ret %a } -declare 
@llvm.riscv.vasubu.rm.mask.nxv4i32.nxv4i32( +declare @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32( , , , @@ -763,7 +763,7 @@ ; CHECK-NEXT: vasubu.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv4i32.nxv4i32( + %a = call @llvm.riscv.vasubu.mask.nxv4i32.nxv4i32( %0, %1, %2, @@ -773,7 +773,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv8i32.nxv8i32( +declare @llvm.riscv.vasubu.nxv8i32.nxv8i32( , , , @@ -787,7 +787,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv8i32.nxv8i32( + %a = call @llvm.riscv.vasubu.nxv8i32.nxv8i32( undef, %0, %1, @@ -796,7 +796,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv8i32.nxv8i32( +declare @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32( , , , @@ -811,7 +811,7 @@ ; CHECK-NEXT: vasubu.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv8i32.nxv8i32( + %a = call @llvm.riscv.vasubu.mask.nxv8i32.nxv8i32( %0, %1, %2, @@ -821,7 +821,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv16i32.nxv16i32( +declare @llvm.riscv.vasubu.nxv16i32.nxv16i32( , , , @@ -835,7 +835,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v16 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv16i32.nxv16i32( + %a = call @llvm.riscv.vasubu.nxv16i32.nxv16i32( undef, %0, %1, @@ -844,7 +844,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv16i32.nxv16i32( +declare @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32( , , , @@ -860,7 +860,7 @@ ; CHECK-NEXT: vasubu.vv v8, v16, v24, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv16i32.nxv16i32( + %a = call @llvm.riscv.vasubu.mask.nxv16i32.nxv16i32( %0, %1, %2, @@ -870,7 +870,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv1i64.nxv1i64( +declare @llvm.riscv.vasubu.nxv1i64.nxv1i64( , , , @@ -884,7 +884,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v9 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv1i64.nxv1i64( + %a = call @llvm.riscv.vasubu.nxv1i64.nxv1i64( undef, %0, %1, @@ -893,7 +893,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv1i64.nxv1i64( +declare @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64( , , , @@ -908,7 +908,7 @@ ; CHECK-NEXT: vasubu.vv v8, v9, v10, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv1i64.nxv1i64( + %a = call @llvm.riscv.vasubu.mask.nxv1i64.nxv1i64( %0, %1, %2, @@ -918,7 +918,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv2i64.nxv2i64( +declare @llvm.riscv.vasubu.nxv2i64.nxv2i64( , , , @@ -932,7 +932,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v10 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv2i64.nxv2i64( + %a = call @llvm.riscv.vasubu.nxv2i64.nxv2i64( undef, %0, %1, @@ -941,7 +941,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv2i64.nxv2i64( +declare @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64( , , , @@ -956,7 +956,7 @@ ; CHECK-NEXT: vasubu.vv v8, v10, v12, v0.t ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.mask.nxv2i64.nxv2i64( + %a = call @llvm.riscv.vasubu.mask.nxv2i64.nxv2i64( %0, %1, %2, @@ -966,7 +966,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.nxv4i64.nxv4i64( +declare @llvm.riscv.vasubu.nxv4i64.nxv4i64( , , , @@ -980,7 +980,7 @@ ; CHECK-NEXT: vasubu.vv v8, v8, v12 ; CHECK-NEXT: ret entry: - %a = call @llvm.riscv.vasubu.rm.nxv4i64.nxv4i64( + %a = call @llvm.riscv.vasubu.nxv4i64.nxv4i64( undef, %0, %1, @@ -989,7 +989,7 @@ ret %a } -declare @llvm.riscv.vasubu.rm.mask.nxv4i64.nxv4i64( +declare @llvm.riscv.vasubu.mask.nxv4i64.nxv4i64( , , , @@ -1004,7 +1004,7 @@ ; CHECK-NEXT: vasubu.vv v8, v12, v16, v0.t ; CHECK-NEXT: ret entry: 
[The remaining hunks of this test apply a single mechanical rename: each `llvm.riscv.vasubu.rm.*` declaration and call site drops the `.rm` infix and becomes `llvm.riscv.vasubu.*`. Within this span that covers the `.vv` form for `nxv4i64` (masked) and `nxv8i64` (unmasked and masked), and the `.vx` form for every element width and LMUL from `nxv1i8` through `nxv8i64`, each in unmasked and masked variants; the `i64` scalar variants carry separate RV32/RV64 check lines. Representative hunks, with the `<vscale x N x iM>` types spelled out, for the `nxv1i8` `.vx` case:]

@@ -1063,7 +1063,7 @@
   ret <vscale x 8 x i64> %a
 }
 
-declare <vscale x 1 x i8> @llvm.riscv.vasubu.rm.nxv1i8.i8(
+declare <vscale x 1 x i8> @llvm.riscv.vasubu.nxv1i8.i8(
   <vscale x 1 x i8>,
   <vscale x 1 x i8>,
   i8,
@@ -1077,7 +1077,7 @@
 ; CHECK-NEXT:    vasubu.vx v8, v8, a0
 ; CHECK-NEXT:    ret
 entry:
-  %a = call <vscale x 1 x i8> @llvm.riscv.vasubu.rm.nxv1i8.i8(
+  %a = call <vscale x 1 x i8> @llvm.riscv.vasubu.nxv1i8.i8(
     <vscale x 1 x i8> undef,
     <vscale x 1 x i8> %0,
     i8 %1,
@@ -1086,7 +1086,7 @@
   ret <vscale x 1 x i8> %a
 }
 
-declare <vscale x 1 x i8> @llvm.riscv.vasubu.rm.mask.nxv1i8.i8(
+declare <vscale x 1 x i8> @llvm.riscv.vasubu.mask.nxv1i8.i8(
   <vscale x 1 x i8>,
   <vscale x 1 x i8>,
   i8,
@@ -1101,7 +1101,7 @@
 ; CHECK-NEXT:    vasubu.vx v8, v9, a0, v0.t
 ; CHECK-NEXT:    ret
 entry:
-  %a = call <vscale x 1 x i8> @llvm.riscv.vasubu.rm.mask.nxv1i8.i8(
+  %a = call <vscale x 1 x i8> @llvm.riscv.vasubu.mask.nxv1i8.i8(
     <vscale x 1 x i8> %0,
     <vscale x 1 x i8> %1,
     i8 %2,